From be737bb141dcfd9b0dc3fc5b62ac6a11c7f220a0 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 7 Dec 2017 16:16:12 -0800 Subject: [PATCH 001/332] Initial commit --- framework/GPUImage.xcodeproj/project.pbxproj | 22 +- framework/Source/BasicOperation.swift | 2 +- framework/Source/Framebuffer.swift | 4 +- framework/Source/FramebufferCache.swift | 2 +- framework/Source/GPUImage-Bridging-Header.h | 14 + framework/Source/ImageOrientation.swift | 2 +- framework/Source/ObjC.h | 14 + framework/Source/ObjC.m | 23 ++ framework/Source/OpenGLContext_Shared.swift | 2 +- framework/Source/OpenGLRendering.swift | 4 +- framework/Source/Pipeline.swift | 19 ++ framework/Source/SerialDispatch.swift | 3 +- framework/Source/ShaderProgram.swift | 2 +- framework/Source/Size.swift | 2 +- framework/Source/iOS/Camera.swift | 30 +- framework/Source/iOS/MovieInput.swift | 58 +++- framework/Source/iOS/MovieOutput.swift | 284 ++++++++++++------- framework/Source/iOS/OpenGLContext.swift | 7 +- framework/Source/iOS/RenderView.swift | 8 +- 19 files changed, 371 insertions(+), 131 deletions(-) create mode 100644 framework/Source/GPUImage-Bridging-Header.h create mode 100644 framework/Source/ObjC.h create mode 100644 framework/Source/ObjC.m diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index a18a613c..9f034e28 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -7,6 +7,10 @@ objects = { /* Begin PBXBuildFile section */ + 1F499A731FDA0F9F0000E37E /* ObjC.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* ObjC.m */; }; + 1F499A741FDA0F9F0000E37E /* ObjC.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* ObjC.m */; }; + 1F499A751FDA0F9F0000E37E /* ObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* ObjC.h */; }; + 1F499A761FDA0F9F0000E37E /* ObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 
1F499A721FDA0F9F0000E37E /* ObjC.h */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -370,6 +374,9 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ + 1F499A711FDA0F9E0000E37E /* ObjC.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ObjC.m; path = Source/ObjC.m; sourceTree = ""; }; + 1F499A721FDA0F9F0000E37E /* ObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ObjC.h; path = Source/ObjC.h; sourceTree = ""; }; + 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImage-Bridging-Header.h"; path = "Source/GPUImage-Bridging-Header.h"; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -961,8 +968,11 @@ BC6E7CAD1C39A9D8006DF678 /* Other */ = { isa = PBXGroup; children = ( + 1F499A721FDA0F9F0000E37E /* 
ObjC.h */, + 1F499A711FDA0F9E0000E37E /* ObjC.m */, BC4C85ED1C9F042900FD95D8 /* ConvertedShaders_GL.swift */, BC9E35531E52521F00B8604F /* ConvertedShaders_GLES.swift */, + 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */, ); name = Other; sourceTree = ""; @@ -1198,6 +1208,7 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( + 1F499A751FDA0F9F0000E37E /* ObjC.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1205,6 +1216,7 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( + 1F499A761FDA0F9F0000E37E /* ObjC.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1295,13 +1307,14 @@ TargetAttributes = { BC6E7CAA1C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; - LastSwiftMigration = 0800; + LastSwiftMigration = 0910; }; BC6E7CB41C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; }; BC9E34E81E524A2200B8604F = { CreatedOnToolsVersion = 8.2.1; + LastSwiftMigration = 0910; ProvisioningStyle = Automatic; }; BC9E34F01E524A2200B8604F = { @@ -1454,6 +1467,7 @@ BC7FD11C1CB0795A00037949 /* NormalBlend.swift in Sources */, BC4EE15E1CB3481F00AD8A65 /* ThresholdSobelEdgeDetection.swift in Sources */, BC7FD1911CB1D2A300037949 /* ImageGenerator.swift in Sources */, + 1F499A731FDA0F9F0000E37E /* ObjC.m in Sources */, BC7FD1201CB079B200037949 /* SaturationBlend.swift in Sources */, BCA4E2491CC3EF26007B51BA /* ColourFASTFeatureDetection.swift in Sources */, BC7FD0FD1CB06E0000037949 /* Position.swift in Sources */, @@ -1637,6 +1651,7 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* HighlightsAndShadows.swift in Sources */, + 1F499A741FDA0F9F0000E37E /* ObjC.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, BC9E35831E52571100B8604F /* LocalBinaryPattern.swift in Sources */, @@ -1788,6 +1803,7 @@ ONLY_ACTIVE_ARCH 
= YES; SDKROOT = macosx; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "Source/GPUImage-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 3.0; VERSIONING_SYSTEM = "apple-generic"; @@ -1831,6 +1847,7 @@ MACOSX_DEPLOYMENT_TARGET = 10.9; SDKROOT = macosx; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "Source/GPUImage-Bridging-Header.h"; SWIFT_VERSION = 3.0; VERSIONING_SYSTEM = "apple-generic"; VERSION_INFO_PREFIX = ""; @@ -1910,6 +1927,7 @@ isa = XCBuildConfiguration; buildSettings = { CLANG_ANALYZER_NONNULL = YES; + CLANG_ENABLE_MODULES = YES; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -1930,6 +1948,7 @@ SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 3.0; TARGETED_DEVICE_FAMILY = "1,2"; }; @@ -1939,6 +1958,7 @@ isa = XCBuildConfiguration; buildSettings = { CLANG_ANALYZER_NONNULL = YES; + CLANG_ENABLE_MODULES = YES; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 60ad430d..c6527caa 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -78,7 +78,7 @@ open class BasicOperation: ImageProcessingOperation { } deinit { - debugPrint("Deallocating operation: \(self)") + //debugPrint("Deallocating operation: \(self)") } // MARK: - diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 4af692a1..0affefba 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -97,7 +97,7 @@ public class Framebuffer { if (!textureOverride) { var mutableTexture = texture glDeleteTextures(1, &mutableTexture) - debugPrint("Delete texture at size: \(size)") + //debugPrint("Delete texture at size: \(size)") } if let framebuffer = 
framebuffer { @@ -174,7 +174,7 @@ public class Framebuffer { framebufferRetainCount -= 1 if (framebufferRetainCount < 1) { if ((framebufferRetainCount < 0) && (cache != nil)) { - print("WARNING: Tried to overrelease a framebuffer") + //print("WARNING: Tried to overrelease a framebuffer") } framebufferRetainCount = 0 cache?.returnToCache(self) diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index f62575c7..7b905d89 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -31,7 +31,7 @@ public class FramebufferCache { framebuffer.orientation = orientation } else { do { - debugPrint("Generating new framebuffer at size: \(size)") + //debugPrint("Generating new framebuffer at size: \(size)") framebuffer = try Framebuffer(context:context, orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) framebuffer.cache = self diff --git a/framework/Source/GPUImage-Bridging-Header.h b/framework/Source/GPUImage-Bridging-Header.h new file mode 100644 index 00000000..4ca4ed7e --- /dev/null +++ b/framework/Source/GPUImage-Bridging-Header.h @@ -0,0 +1,14 @@ +// +// GPUImage-Bridging-Header.h +// GPUImage +// +// Created by Josh Bernfeld on 12/7/17. +// Copyright © 2017 Sunset Lake Software LLC. All rights reserved. 
+// + +#ifndef GPUImage_Bridging_Header_h +#define GPUImage_Bridging_Header_h + +#import "ObjC.h" + +#endif /* GPUImage_Bridging_Header_h */ diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index 59013707..afdc073c 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -33,7 +33,7 @@ public enum Rotation { case rotateClockwiseAndFlipVertically case rotateClockwiseAndFlipHorizontally - func flipsDimensions() -> Bool { + public func flipsDimensions() -> Bool { switch self { case .noRotation, .rotate180, .flipHorizontally, .flipVertically: return false case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: return true diff --git a/framework/Source/ObjC.h b/framework/Source/ObjC.h new file mode 100644 index 00000000..86dbae1d --- /dev/null +++ b/framework/Source/ObjC.h @@ -0,0 +1,14 @@ +// +// ObjC.h +// GPUImage2 +// +// Created by Josh Bernfeld on 11/23/17. +// + +#import + +@interface ObjC : NSObject + ++ (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error; + +@end diff --git a/framework/Source/ObjC.m b/framework/Source/ObjC.m new file mode 100644 index 00000000..ad687703 --- /dev/null +++ b/framework/Source/ObjC.m @@ -0,0 +1,23 @@ +// +// ObjC.m +// GPUImage2 +// +// Created by Josh Bernfeld on 11/23/17. 
+// + +#import "ObjC.h" + +@implementation ObjC + ++ (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error { + @try { + tryBlock(); + return YES; + } + @catch (NSException *exception) { + *error = [[NSError alloc] initWithDomain:exception.name code:0 userInfo:exception.userInfo]; + return NO; + } +} + +@end diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index 20473a30..6fcbf83d 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -23,7 +23,7 @@ extension OpenGLContext { if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { return shaderFromCache } else { - return try sharedImageProcessingContext.runOperationSynchronously{ + return try self.runOperationSynchronously{ let program = try ShaderProgram(vertexShader:vertexShader, fragmentShader:fragmentShader) self.shaderCache[lookupKeyForShaderProgram] = program return program diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index 020b0b12..ba55c790 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -63,7 +63,7 @@ public let standardImageVertices:[GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, public let verticallyInvertedImageVertices:[GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] // "position" and "inputTextureCoordinate", "inputTextureCoordinate2" attribute naming follows the convention of the old GPUImage -public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat]? = nil, vertexBufferObject:GLuint? = nil, inputTextures:[InputTextureProperties]) { +public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat]? = nil, vertexBufferObject:GLuint? 
= nil, inputTextures:[InputTextureProperties], context: OpenGLContext = sharedImageProcessingContext) { switch (vertices, vertexBufferObject) { case (.none, .some): break case (.some, .none): break @@ -71,7 +71,7 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU case (.none, .none): fatalError("Can't specify both vertices and a VBO in renderQuadWithShader()") } - sharedImageProcessingContext.makeCurrentContext() + context.makeCurrentContext() shader.use() uniformSettings?.restoreShaderSettings(shader) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 65611ea7..c6ca8296 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -51,6 +51,15 @@ public extension ImageSource { targets.removeAll() } + public func remove(_ target:ImageConsumer) { + for (testTarget, index) in targets { + if(target === testTarget) { + target.removeSourceAtIndex(index) + targets.remove(target) + } + } + } + public func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) { if targets.count == 0 { // Deal with the case where no targets are attached by immediately returning framebuffer to cache framebuffer.lock() @@ -158,6 +167,16 @@ public class TargetContainer:Sequence { } #endif } + + public func remove(_ target:ImageConsumer) { + #if os(Linux) + self.targets = self.targets.filter { $0.value !== target } + #else + dispatchQueue.async{ + self.targets = self.targets.filter { $0.value !== target } + } + #endif + } } public class SourceContainer { diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index bdf32f1d..f87aec8c 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -65,6 +65,7 @@ func runOnMainQueue(_ mainThreadOperation:() -> T) -> T { public protocol SerialDispatch { var serialDispatchQueue:DispatchQueue { get } var dispatchQueueKey:DispatchSpecificKey { get } + var dispatchQueueKeyValue:Int { get } 
func makeCurrentContext() } @@ -78,7 +79,7 @@ public extension SerialDispatch { public func runOperationSynchronously(_ operation:() -> ()) { // TODO: Verify this works as intended - if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == 81) { + if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == self.dispatchQueueKeyValue) { operation() } else { self.serialDispatchQueue.sync { diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index abf50b63..7e2013bc 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -59,7 +59,7 @@ public class ShaderProgram { } deinit { - debugPrint("Shader deallocated") + //debugPrint("Shader deallocated") if (vertexShader != nil) { glDeleteShader(vertexShader) diff --git a/framework/Source/Size.swift b/framework/Source/Size.swift index 07604c39..4e55e925 100644 --- a/framework/Source/Size.swift +++ b/framework/Source/Size.swift @@ -6,4 +6,4 @@ public struct Size { self.width = width self.height = height } -} \ No newline at end of file +} diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 0c7f4950..154ef4ad 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -51,14 +51,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioEncodingTarget:AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { - self.removeAudioInputsAndOutputs() + //self.removeAudioInputsAndOutputs() return } do { try self.addAudioInputsAndOutputs() audioEncodingTarget.activateAudioTrack() } catch { - fatalError("ERROR: Could not connect audio target with error: \(error)") + print("ERROR: Could not connect audio target with error: \(error)") } } } @@ -66,9 +66,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public let targets = TargetContainer() public var delegate: CameraDelegate? 
public let captureSession:AVCaptureSession - let inputCamera:AVCaptureDevice! + public let inputCamera:AVCaptureDevice! let videoInput:AVCaptureDeviceInput! - let videoOutput:AVCaptureVideoDataOutput! + public let videoOutput:AVCaptureVideoDataOutput! var microphone:AVCaptureDevice? var audioInput:AVCaptureDeviceInput? var audioOutput:AVCaptureAudioDataOutput? @@ -151,6 +151,15 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.addOutput(videoOutput) } captureSession.sessionPreset = sessionPreset + + if let connections = videoOutput.connections as? [AVCaptureConnection] { + for connection in connections { + if(connection.isVideoMirroringSupported) { + connection.isVideoMirrored = (location == .frontFacing) + } + } + } + captureSession.commitConfiguration() super.init() @@ -161,7 +170,10 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer deinit { sharedImageProcessingContext.runOperationSynchronously{ self.stopCapture() - self.videoOutput.setSampleBufferDelegate(nil, queue:nil) + //Fix crash when hitting catch block in init block + if(self.videoOutput != nil) { + self.videoOutput.setSampleBufferDelegate(nil, queue:nil) + } self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) } } @@ -278,6 +290,10 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if (captureSession.isRunning) { captureSession.stopRunning() } + + //Fixes need to call this after calling stopCapture + //when app will enter background + glFinish() } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { @@ -287,7 +303,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer // MARK: - // MARK: Audio processing - func addAudioInputsAndOutputs() throws { + public func addAudioInputsAndOutputs() throws { guard (audioOutput == nil) else { return } captureSession.beginConfiguration() @@ -306,7 +322,7 @@ public class Camera: NSObject, 
ImageSource, AVCaptureVideoDataOutputSampleBuffer audioOutput?.setSampleBufferDelegate(self, queue:audioProcessingQueue) } - func removeAudioInputsAndOutputs() { + public func removeAudioInputsAndOutputs() { guard (audioOutput != nil) else { return } captureSession.beginConfiguration() diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 8db20754..96c67790 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -6,7 +6,7 @@ public class MovieInput: ImageSource { let yuvConversionShader:ShaderProgram let asset:AVAsset - let assetReader:AVAssetReader + var assetReader:AVAssetReader! let playAtActualSpeed:Bool let loop:Bool var videoEncodingIsFinished = false @@ -24,12 +24,6 @@ public class MovieInput: ImageSource { self.loop = loop self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - assetReader = try AVAssetReader(asset:self.asset) - - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) - readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader.add(readerVideoTrackOutput) // TODO: Audio here } @@ -41,16 +35,48 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control + + public func createReader() -> AVAssetReader? + { + var assetRead: AVAssetReader? 
+ do { + assetRead = try AVAssetReader.init(asset: self.asset) + + let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) + readerVideoTrackOutput.alwaysCopiesSampleData = false + + assetRead!.add(readerVideoTrackOutput) + } catch { + print("Could not create asset reader: \(error)") + } + + return assetRead + } + public func start() { + assetReader = createReader() + + if(assetReader == nil) { return } + asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: { guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } - - guard self.assetReader.startReading() else { - print("Couldn't start reading") + + do { + try ObjC.catchException { + guard self.assetReader.startReading() else { + print("Couldn't start reading") + return + } + } + } + catch { + print("Couldn't start reading \(error)") return } + var readerVideoTrackOutput:AVAssetReaderOutput? 
= nil; @@ -68,7 +94,7 @@ public class MovieInput: ImageSource { self.assetReader.cancelReading() if (self.loop) { - // TODO: Restart movie processing + self.start() } else { self.endProcessing() } @@ -78,8 +104,14 @@ public class MovieInput: ImageSource { } public func cancel() { - assetReader.cancelReading() - self.endProcessing() + if(assetReader != nil) { + assetReader.cancelReading() + self.endProcessing() + } + + //Fixes need to call this after calling stopCapture + //when app will enter background + glFinish() } func endProcessing() { diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 03bc9593..a79403d9 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,5 +1,7 @@ import AVFoundation +extension String: Error {} + public protocol AudioEncodingTarget { func activateAudioTrack() func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) @@ -12,28 +14,37 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? - + let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor let size:Size let colorSwizzlingShader:ShaderProgram private var isRecording = false + private var isFinishing = false + private var finishRecordingCompletionCallback:(() -> Void)? = nil private var videoEncodingIsFinished = false private var audioEncodingIsFinished = false private var startTime:CMTime? - private var previousFrameTime = kCMTimeNegativeInfinity - private var previousAudioTime = kCMTimeNegativeInfinity + private var firstFrameTime: CMTime? + private var previousFrameTime: CMTime? + private var previousAudioTime: CMTime? private var encodingLiveVideo:Bool var pixelBuffer:CVPixelBuffer? = nil var renderFramebuffer:Framebuffer! 
+ let movieProcessingContext: OpenGLContext + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { - if sharedImageProcessingContext.supportsTextureCaches() { - self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader + imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup + let movieProcessingContext = OpenGLContext() + + if movieProcessingContext.supportsTextureCaches() { + self.colorSwizzlingShader = movieProcessingContext.passthroughShader } else { - self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} + self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try movieProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} } self.size = size + assetWriter = try AVAssetWriter(url:URL, fileType:fileType) // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) @@ -54,122 +65,193 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
- let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(value:size.width), - kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] + let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), + kCVPixelBufferWidthKey as String:NSNumber(value:self.size.width), + kCVPixelBufferHeightKey as String:NSNumber(value:self.size.height)] assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) + + self.movieProcessingContext = movieProcessingContext } - public func startRecording() { + public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { startTime = nil - sharedImageProcessingContext.runOperationSynchronously{ - self.isRecording = self.assetWriter.startWriting() - - CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) - - /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion - * regardless of the kCVImageBufferYCbCrMatrixKey value. - * Tagging the resulting video file as BT.601, is the best option right now. - * Creating a proper BT.709 video is not possible at the moment. - */ - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) - - let bufferSize = GLSize(self.size) - var cachedTextureRef:CVOpenGLESTexture? 
= nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, self.pixelBuffer!, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) - let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) - - self.renderFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + + movieProcessingContext.runOperationAsynchronously { + do { + try ObjC.catchException { + self.isRecording = self.assetWriter.startWriting() + } + + if(!self.isRecording) { + throw "Could not start asset writer: \(self.assetWriter.error)" + } + + guard let pixelBufferPool = self.assetWriterPixelBufferInput.pixelBufferPool else { + //When the pixelBufferPool returns nil, check the following: + //https://stackoverflow.com/a/20110179/1275014 + throw "Pixel buffer pool was nil" + } + + CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &self.pixelBuffer) + + guard let pixelBuffer = self.pixelBuffer else { + throw "Unable to create pixel buffer" + } + + /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion + * regardless of the kCVImageBufferYCbCrMatrixKey value. + * Tagging the resulting video file as BT.601, is the best option right now. + * Creating a proper BT.709 video is not possible at the moment. + */ + CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) + + let bufferSize = GLSize(self.size) + var cachedTextureRef:CVOpenGLESTexture? 
= nil + let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) + + self.renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + + completionCallback?(true) + } catch { + self.assetWriter.cancelWriting() + self.isRecording = false + + completionCallback?(false) + } } + } public func finishRecording(_ completionCallback:(() -> Void)? = nil) { - sharedImageProcessingContext.runOperationSynchronously{ - self.isRecording = false + movieProcessingContext.runOperationAsynchronously{ + guard self.isRecording else { return } + guard !self.isFinishing else { return } - if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { - sharedImageProcessingContext.runOperationAsynchronously{ - completionCallback?() - } + self.finishRecordingCompletionCallback = completionCallback + + if (self.assetWriter.status != .writing) { + completionCallback?() return } - if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { - self.videoEncodingIsFinished = true - self.assetWriterVideoInput.markAsFinished() - } - if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { - self.audioEncodingIsFinished = true - self.assetWriterAudioInput?.markAsFinished() - } + + self.finishAudioWriting() - // Why can't I use ?? here for the callback? 
- if let callback = completionCallback { - self.assetWriter.finishWriting(completionHandler: callback) - } else { - self.assetWriter.finishWriting{} + // Check if there was audio + if(self.previousAudioTime != nil) { + // Video will finish once a there is a frame time that is later than the last recorded audio buffer time + self.isFinishing = true + // Call finishVideoWriting again just incase we don't recieve any additional buffers + self.movieProcessingContext.serialDispatchQueue.asyncAfter(deadline: .now() + 0.1) { + self.finishVideoWriting() + } + } + else { + // We can finish immediately since there is no audio + self.finishVideoWriting() } } } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - defer { - framebuffer.unlock() - } - guard isRecording else { return } - // Ignore still images and other non-video updates (do I still need this?) - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != previousFrameTime) else { return } - - if (startTime == nil) { - if (assetWriter.status != .writing) { - assetWriter.startWriting() - } - - assetWriter.startSession(atSourceTime: frameTime) - startTime = frameTime - } + private func finishVideoWriting() { + guard self.isRecording else { return } - // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { - debugPrint("Had to drop a frame at time \(frameTime)") - return - } + self.isFinishing = false + self.isRecording = false - if !sharedImageProcessingContext.supportsTextureCaches() { - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) - guard ((pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return } + if 
((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { + self.videoEncodingIsFinished = true + self.assetWriterVideoInput.markAsFinished() } - renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) - - if (!assetWriterPixelBufferInput.append(pixelBuffer!, withPresentationTime:frameTime)) { - debugPrint("Problem appending pixel buffer at time: \(frameTime)") + self.assetWriter.finishWriting{ + self.finishRecordingCompletionCallback?() } + } + + private func finishAudioWriting() { + if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { + self.audioEncodingIsFinished = true + self.assetWriterAudioInput?.markAsFinished() + } + } + + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + glFinish(); - CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !sharedImageProcessingContext.supportsTextureCaches() { - pixelBuffer = nil + movieProcessingContext.runOperationAsynchronously { + guard self.renderFramebuffer != nil else { return } + guard self.isRecording else { return } + guard self.assetWriter.status == .writing else { return } + guard !self.videoEncodingIsFinished else { return } + + // Ignore still images and other non-video updates (do I still need this?) 
+ guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + + // Check if we are finishing and if this frame is later than the last recorded audio buffer + // Note: isFinishing is only set when there is an audio buffer, otherwise the video is finished immediately + if self.isFinishing, + let previousAudioTime = self.previousAudioTime, + CMTimeCompare(previousAudioTime, frameTime) == -1 { + // Finish recording + self.finishVideoWriting() + return + } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case + guard (frameTime != self.previousFrameTime) else { return } + + if (self.startTime == nil) { + self.assetWriter.startSession(atSourceTime: frameTime) + self.startTime = frameTime + self.firstFrameTime = frameTime + } + + guard (self.assetWriterVideoInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { + debugPrint("Had to drop a frame at time \(frameTime)") + return + } + + if !self.movieProcessingContext.supportsTextureCaches() { + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) + guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return } + } + + self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + + if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { + debugPrint("Problem appending pixel buffer at time: \(frameTime)") + } + + CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + if !self.movieProcessingContext.supportsTextureCaches() { + self.pixelBuffer = nil + } + + sharedImageProcessingContext.runOperationAsynchronously { + //Must be called from the context it came from + framebuffer.unlock() + } } } func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { - if 
!sharedImageProcessingContext.supportsTextureCaches() { - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) + if !movieProcessingContext.supportsTextureCaches() { + renderFramebuffer = movieProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) renderFramebuffer.lock() } renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:movieProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: movieProcessingContext) - if sharedImageProcessingContext.supportsTextureCaches() { + if movieProcessingContext.supportsTextureCaches() { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) @@ -179,7 +261,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: - // MARK: Audio support - + public func activateAudioTrack() { // TODO: Add ability to set custom output settings assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) @@ -190,23 +272,35 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { guard let assetWriterAudioInput = assetWriterAudioInput else { return } - 
sharedImageProcessingContext.runOperationSynchronously{ + movieProcessingContext.runOperationAsynchronously{ + guard self.isRecording else { return } + guard self.assetWriter.status == .writing else { return } + guard !self.audioEncodingIsFinished else { return } + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if (self.startTime == nil) { - if (self.assetWriter.status != .writing) { - self.assetWriter.startWriting() + + if let firstFrameTime = self.firstFrameTime { + // Check if the time of this audio sample is before the time of the first frame + // If so then ignore it + if (CMTimeCompare(currentSampleTime, firstFrameTime) == -1) { + return } - - self.assetWriter.startSession(atSourceTime: currentSampleTime) - self.startTime = currentSampleTime } + else { + // We have not recorded any video yet, so we do not know if this audio sample + // falls before or after the time of the first frame which has not yet come in. + // There may be a better solution for this case + return + } + + self.previousAudioTime = currentSampleTime guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { return } if (!assetWriterAudioInput.append(sampleBuffer)) { - print("Trouble appending audio sample buffer") + print("Trouble appending audio sample buffer: \(self.assetWriter.error)") } } } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index fc5c81d8..64975cc6 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -4,6 +4,8 @@ import UIKit // TODO: Find a way to warn people if they set this after the context has been created var imageProcessingShareGroup:EAGLSharegroup? 
= nil +var dispatchQueKeyValueCounter = 81 + public class OpenGLContext: SerialDispatch { lazy var framebufferCache:FramebufferCache = { return FramebufferCache(context:self) @@ -27,12 +29,15 @@ public class OpenGLContext: SerialDispatch { public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) public let dispatchQueueKey = DispatchSpecificKey() + public let dispatchQueueKeyValue: Int // MARK: - // MARK: Initialization and teardown init() { - serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) + dispatchQueueKeyValue = dispatchQueKeyValueCounter + serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:dispatchQueueKeyValue) + dispatchQueKeyValueCounter += 1 let generatedContext:EAGLContext? if let shareGroup = imageProcessingShareGroup { diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 3bc4f382..8c2c31d9 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -58,9 +58,11 @@ public class RenderView:UIView, ImageConsumer { glGenRenderbuffers(1, &newDisplayRenderbuffer) displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - - sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! CAEAGLLayer) - + + DispatchQueue.main.sync { + sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! 
CAEAGLLayer) + } + var backingWidth:GLint = 0 var backingHeight:GLint = 0 glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_WIDTH), &backingWidth) From 4646afa659df77b68ef706ef1622768c034bbfcd Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 11 Dec 2017 13:22:02 -0800 Subject: [PATCH 002/332] Account for runtime occasional runtime errors in capture session Fix glfinish crash --- framework/Source/iOS/Camera.swift | 28 ++++++++++++++++++++------- framework/Source/iOS/MovieInput.swift | 4 ---- 2 files changed, 21 insertions(+), 11 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 154ef4ad..6f7c944a 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -85,6 +85,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var totalFrameTimeDuringCapture:Double = 0.0 var framesSinceLastCheck = 0 var lastCheckTime = CFAbsoluteTimeGetCurrent() + + var captureSessionRestartAttempts = 0 public init(sessionPreset:String, cameraDevice:AVCaptureDevice? 
= nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true) throws { @@ -165,19 +167,35 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() videoOutput.setSampleBufferDelegate(self, queue:cameraProcessingQueue) + + NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionRuntimeError(note:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionDidStartRunning(note:)), name: NSNotification.Name.AVCaptureSessionDidStartRunning, object: nil) } deinit { sharedImageProcessingContext.runOperationSynchronously{ self.stopCapture() //Fix crash when hitting catch block in init block - if(self.videoOutput != nil) { - self.videoOutput.setSampleBufferDelegate(nil, queue:nil) - } + self.videoOutput?.setSampleBufferDelegate(nil, queue:nil) + self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) } } + func captureSessionRuntimeError(note: NSNotification) { + print("Capture Session Runtime Error: \(note.userInfo)") + if(self.captureSessionRestartAttempts < 1) { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + self.startCapture() + } + self.captureSessionRestartAttempts += 1 + } + } + + func captureSessionDidStartRunning(note: NSNotification) { + self.captureSessionRestartAttempts = 0 + } + public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) 
{ guard (captureOutput != audioOutput) else { self.processAudioSampleBuffer(sampleBuffer) @@ -290,10 +308,6 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if (captureSession.isRunning) { captureSession.stopRunning() } - - //Fixes need to call this after calling stopCapture - //when app will enter background - glFinish() } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 96c67790..bbf68ca3 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -108,10 +108,6 @@ public class MovieInput: ImageSource { assetReader.cancelReading() self.endProcessing() } - - //Fixes need to call this after calling stopCapture - //when app will enter background - glFinish() } func endProcessing() { From e146826db25c3622a275c5a505646fbb5474a292 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 20 Dec 2017 01:36:30 -0800 Subject: [PATCH 003/332] Update movieInput to allow for AVComposition, looping and fix distrotion from pull request #183 --- framework/Source/iOS/MovieInput.swift | 139 ++++++++++++++++++-------- 1 file changed, 99 insertions(+), 40 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index bbf68ca3..6d5007e9 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -6,9 +6,11 @@ public class MovieInput: ImageSource { let yuvConversionShader:ShaderProgram let asset:AVAsset - var assetReader:AVAssetReader! + let videoComposition: AVVideoComposition? + var assetReader:AVAssetReader? 
+ var started = false let playAtActualSpeed:Bool - let loop:Bool + public var loop:Bool var videoEncodingIsFinished = false var previousFrameTime = kCMTimeZero var previousActualFrameTime = CFAbsoluteTimeGetCurrent() @@ -18,8 +20,9 @@ public class MovieInput: ImageSource { // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false) throws { self.asset = asset + self.videoComposition = videoComposition self.playAtActualSpeed = playAtActualSpeed self.loop = loop self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} @@ -30,7 +33,7 @@ public class MovieInput: ImageSource { public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop) + try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop) } // MARK: - @@ -38,35 +41,48 @@ public class MovieInput: ImageSource { public func createReader() -> AVAssetReader? { - var assetRead: AVAssetReader? + var assetReader: AVAssetReader? 
do { - assetRead = try AVAssetReader.init(asset: self.asset) + let outputSettings:[String:AnyObject] = + [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) - readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader = try AVAssetReader.init(asset: self.asset) - assetRead!.add(readerVideoTrackOutput) + if(self.videoComposition == nil) { + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader!.add(readerVideoTrackOutput) + } + else { + let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: AVMediaTypeVideo), videoSettings: outputSettings) + readerVideoTrackOutput.videoComposition = self.videoComposition + assetReader!.add(readerVideoTrackOutput) + } + } catch { print("Could not create asset reader: \(error)") } - return assetRead + return assetReader } public func start() { - assetReader = createReader() + if(self.started) { return } + if(assetReader == nil) { assetReader = createReader() } if(assetReader == nil) { return } + self.started = true + asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: { guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } + guard let assetReader = self.assetReader else { return } do { try ObjC.catchException { - guard self.assetReader.startReading() else { + guard assetReader.startReading() else { 
print("Couldn't start reading") return } @@ -76,24 +92,24 @@ public class MovieInput: ImageSource { print("Couldn't start reading \(error)") return } - var readerVideoTrackOutput:AVAssetReaderOutput? = nil; - for output in self.assetReader.outputs { + for output in assetReader.outputs { if(output.mediaType == AVMediaTypeVideo) { readerVideoTrackOutput = output; } } - while (self.assetReader.status == .reading) { + while (assetReader.status == .reading) { self.readNextVideoFrame(from:readerVideoTrackOutput!) } - if (self.assetReader.status == .completed) { - self.assetReader.cancelReading() + if (assetReader.status == .completed) { + assetReader.cancelReading() if (self.loop) { + self.endProcessing() self.start() } else { self.endProcessing() @@ -104,20 +120,23 @@ public class MovieInput: ImageSource { } public func cancel() { - if(assetReader != nil) { + if let assetReader = self.assetReader { assetReader.cancelReading() self.endProcessing() } } func endProcessing() { - + self.assetReader = nil + self.started = false } // MARK: - // MARK: Internal processing functions func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { + guard let assetReader = self.assetReader else { return } + if ((assetReader.status == .reading) && !videoEncodingIsFinished) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { @@ -166,42 +185,82 @@ public class MovieInput: ImageSource { self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } + //Code from pull request https://github.com/BradLarson/GPUImage2/pull/183 func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - + let conversionMatrix = colorConversionMatrix601FullRangeDefault // TODO: Get this color query working -// if let colorAttachments = 
CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) { -// if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) { -// _preferredConversion = kColorConversion601FullRange -// } else { -// _preferredConversion = kColorConversion709 -// } -// } else { -// _preferredConversion = kColorConversion601FullRange -// } + // if let colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) { + // if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) { + // _preferredConversion = kColorConversion601FullRange + // } else { + // _preferredConversion = kColorConversion709 + // } + // } else { + // _preferredConversion = kColorConversion601FullRange + // } let startTime = CFAbsoluteTimeGetCurrent() - - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) - luminanceFramebuffer.lock() + + var luminanceGLTexture: CVOpenGLESTexture? 
+ glActiveTexture(GLenum(GL_TEXTURE0)) - glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) - glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) - chrominanceFramebuffer.lock() + let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) + + assert(luminanceGLTextureResult == kCVReturnSuccess && luminanceGLTexture != nil) + + let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) + + glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + + let luminanceFramebuffer: Framebuffer + do { + luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return + } + + luminanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache + luminanceFramebuffer.lock() + + + var chrominanceGLTexture: CVOpenGLESTexture? 
+ glActiveTexture(GLenum(GL_TEXTURE1)) - glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) - glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1)) + + let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) + + assert(chrominanceGLTextureResult == kCVReturnSuccess && chrominanceGLTexture != nil) + + let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) + + glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + + let chrominanceFramebuffer: Framebuffer + do { + chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return + } + + chrominanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache + chrominanceFramebuffer.lock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, 
resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - + movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) self.updateTargetsWithFramebuffer(movieFramebuffer) From 3fbe42f4a51035fc8a4dbb22d3db9ea2abfd8863 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Fri, 22 Dec 2017 16:47:43 -0800 Subject: [PATCH 004/332] Remove hard crashes, better errors --- framework/Source/iOS/MovieInput.swift | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 6d5007e9..41f0bfd2 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -83,13 +83,13 @@ public class MovieInput: ImageSource { do { try ObjC.catchException { guard assetReader.startReading() else { - print("Couldn't start reading") + print("Couldn't start reading: \(assetReader.error)") return } } } catch { - print("Couldn't start reading \(error)") + print("Couldn't start reading: \(error)") return } @@ -211,7 +211,10 @@ public class MovieInput: ImageSource { let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) - assert(luminanceGLTextureResult == kCVReturnSuccess && luminanceGLTexture != nil) + if(luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil) { + print("Could not create LuminanceGLTexture") + return + } let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) 
@@ -230,14 +233,16 @@ public class MovieInput: ImageSource { luminanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache luminanceFramebuffer.lock() - var chrominanceGLTexture: CVOpenGLESTexture? glActiveTexture(GLenum(GL_TEXTURE1)) let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) - assert(chrominanceGLTextureResult == kCVReturnSuccess && chrominanceGLTexture != nil) + if(chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil) { + print("Could not create ChrominanceGLTexture") + return + } let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) From 5e8c8a083e2d288058e65820ee34584d1829f429 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sat, 23 Dec 2017 22:20:01 -0800 Subject: [PATCH 005/332] pr#95 image buffer new frame available was creating 1 too many internal buffers --- framework/Source/Operations/ImageBuffer.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index 6b5b5675..29e1100e 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -10,7 +10,7 @@ public class ImageBuffer: ImageProcessingOperation { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { bufferedFramebuffers.append(framebuffer) - if (bufferedFramebuffers.count > Int(bufferSize)) { + if (bufferedFramebuffers.count >= Int(bufferSize)) { let releasedFramebuffer = bufferedFramebuffers.removeFirst() updateTargetsWithFramebuffer(releasedFramebuffer) releasedFramebuffer.unlock() From 79b02b2c819dd898bbf18742785ec031ac68aa3e Mon Sep 17 00:00:00 2001 
From: Josh Bernfeld Date: Sat, 23 Dec 2017 22:21:00 -0800 Subject: [PATCH 006/332] pr#84 Fix framebuffer overrelease error due to async target counts --- framework/Source/Pipeline.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index c6ca8296..a0a3e56a 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -101,7 +101,7 @@ class WeakImageConsumer { public class TargetContainer:Sequence { var targets = [WeakImageConsumer]() - var count:Int { get {return targets.count}} + var count:Int { get { return self.dispatchQueue.sync{return targets.count}}} #if !os(Linux) let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) #endif From e256614349c90ff65134e62817c24b5283b87835 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sat, 23 Dec 2017 22:22:57 -0800 Subject: [PATCH 007/332] Uncomment warning --- framework/Source/Framebuffer.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 0affefba..5af05417 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -174,7 +174,7 @@ public class Framebuffer { framebufferRetainCount -= 1 if (framebufferRetainCount < 1) { if ((framebufferRetainCount < 0) && (cache != nil)) { - //print("WARNING: Tried to overrelease a framebuffer") + print("WARNING: Tried to overrelease a framebuffer") } framebufferRetainCount = 0 cache?.returnToCache(self) From 15fd172a494bc2b4993e995b548d418c41afd785 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 18:43:21 -0800 Subject: [PATCH 008/332] Fix render buffer never being locked on BasicOperation --- framework/Source/BasicOperation.swift | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/framework/Source/BasicOperation.swift 
b/framework/Source/BasicOperation.swift index c6527caa..5c98a296 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -2,12 +2,12 @@ import Foundation public func defaultVertexShaderForInputs(_ inputCount:UInt) -> String { switch inputCount { - case 1: return OneInputVertexShader - case 2: return TwoInputVertexShader - case 3: return ThreeInputVertexShader - case 4: return FourInputVertexShader - case 5: return FiveInputVertexShader - default: return OneInputVertexShader + case 1: return OneInputVertexShader + case 2: return TwoInputVertexShader + case 3: return ThreeInputVertexShader + case 4: return FourInputVertexShader + case 5: return FiveInputVertexShader + default: return OneInputVertexShader } } @@ -35,10 +35,10 @@ open class BasicOperation: ImageProcessingOperation { } public var activatePassthroughOnNextFrame:Bool = false public var uniformSettings = ShaderUniformSettings() - + // MARK: - // MARK: Internal - + public let targets = TargetContainer() public let sources = SourceContainer() var shader:ShaderProgram @@ -51,7 +51,7 @@ open class BasicOperation: ImageProcessingOperation { // MARK: - // MARK: Initialization and teardown - + public init(shader:ShaderProgram, numberOfInputs:UInt = 1) { self.maximumInputs = numberOfInputs self.shader = shader @@ -64,7 +64,7 @@ open class BasicOperation: ImageProcessingOperation { self.shader = compiledShader usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - + public init(vertexShaderFile:URL? 
= nil, fragmentShaderFile:URL, numberOfInputs:UInt = 1, operationName:String = #file) throws { let compiledShader:ShaderProgram if let vertexShaderFile = vertexShaderFile { @@ -89,7 +89,7 @@ open class BasicOperation: ImageProcessingOperation { previousFramebuffer.unlock() } inputFramebuffers[fromSourceIndex] = framebuffer - + guard (!activatePassthroughOnNextFrame) else { // Use this to allow a bootstrap of cyclical processing, like with a low pass filter activatePassthroughOnNextFrame = false updateTargetsWithFramebuffer(framebuffer) @@ -104,7 +104,10 @@ open class BasicOperation: ImageProcessingOperation { } func renderFrame() { + if(renderFramebuffer != nil) { renderFramebuffer.unlock() } + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) + renderFramebuffer.lock() let textureProperties = initialTextureProperties() configureFramebufferSpecificUniforms(inputFramebuffers[0]!) 
@@ -191,3 +194,4 @@ open class BasicOperation: ImageProcessingOperation { } } } + From 4bb89efb68d263025d7f8261a18c0f118b6a837e Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 18:46:28 -0800 Subject: [PATCH 009/332] Image processing shouldnt be done on main thread Fixes random crashes that occur when changing targets of InputImage after it already has a framebuffer rendered --- framework/Source/Pipeline.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index a0a3e56a..d4c96f4b 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -35,10 +35,14 @@ public extension ImageSource { if let targetIndex = atTargetIndex { target.setSource(self, atIndex:targetIndex) targets.append(target, indexAtTarget:targetIndex) - transmitPreviousImage(to:target, atIndex:targetIndex) + sharedImageProcessingContext.runOperationAsynchronously { + self.transmitPreviousImage(to:target, atIndex:targetIndex) + } } else if let indexAtTarget = target.addSource(self) { targets.append(target, indexAtTarget:indexAtTarget) - transmitPreviousImage(to:target, atIndex:indexAtTarget) + sharedImageProcessingContext.runOperationAsynchronously { + self.transmitPreviousImage(to:target, atIndex:indexAtTarget) + } } else { debugPrint("Warning: tried to add target beyond target's input capacity") } From c1acaed67a87138c9659500806f4d717d02d1f7b Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 18:48:24 -0800 Subject: [PATCH 010/332] Fix background thread warning, add delegate to renderview --- framework/Source/iOS/RenderView.swift | 43 ++++++++++++++++++++------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 8c2c31d9..c54d950c 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -1,8 +1,14 @@ import UIKit 
+protocol RenderViewDelegate: class { + func didDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) +} + // TODO: Add support for transparency // TODO: Deal with view resizing public class RenderView:UIView, ImageConsumer { + weak var delegate:RenderViewDelegate? + public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio public var orientation:ImageOrientation = .portrait @@ -17,19 +23,19 @@ public class RenderView:UIView, ImageConsumer { private lazy var displayShader:ShaderProgram = { return sharedImageProcessingContext.passthroughShader }() - + // TODO: Need to set viewport to appropriate size, resize viewport on view reshape required public init?(coder:NSCoder) { super.init(coder:coder) self.commonInit() } - + public override init(frame:CGRect) { super.init(frame:frame) self.commonInit() } - + override public class var layerClass:Swift.AnyClass { get { return CAEAGLLayer.self @@ -49,19 +55,19 @@ public class RenderView:UIView, ImageConsumer { } func createDisplayFramebuffer() { + sharedImageProcessingContext.makeCurrentContext() + var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer glBindFramebuffer(GLenum(GL_FRAMEBUFFER), displayFramebuffer!) - + var newDisplayRenderbuffer:GLuint = 0 glGenRenderbuffers(1, &newDisplayRenderbuffer) displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - DispatchQueue.main.sync { - sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! CAEAGLLayer) - } + sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! 
CAEAGLLayer) var backingWidth:GLint = 0 var backingHeight:GLint = 0 @@ -72,7 +78,7 @@ public class RenderView:UIView, ImageConsumer { guard ((backingWidth > 0) && (backingHeight > 0)) else { fatalError("View had a zero size") } - + glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), displayRenderbuffer!) let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) @@ -103,18 +109,33 @@ public class RenderView:UIView, ImageConsumer { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - if (displayFramebuffer == nil) { - self.createDisplayFramebuffer() + if (self.displayFramebuffer == nil) { + DispatchQueue.main.async { + self.createDisplayFramebuffer() + + sharedImageProcessingContext.runOperationAsynchronously { + self.displayFramebuffer(framebuffer, fromSourceIndex: fromSourceIndex) + } + } + } + else { + self.displayFramebuffer(framebuffer, fromSourceIndex: fromSourceIndex) } + } + + public func displayFramebuffer(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { self.activateDisplayFramebuffer() clearFramebufferWithColor(backgroundRenderColor) - + let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:backingSize) renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) framebuffer.unlock() glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) 
sharedImageProcessingContext.presentBufferForDisplay() + + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) } } + From a8fe9688cba9ef241e997d444e25940a9dd362a8 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 18:50:51 -0800 Subject: [PATCH 011/332] make sure to cleanup renderframebuffer when done --- framework/Source/BasicOperation.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 5c98a296..220e0de4 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -79,6 +79,7 @@ open class BasicOperation: ImageProcessingOperation { deinit { //debugPrint("Deallocating operation: \(self)") + if(renderFramebuffer != nil) { renderFramebuffer.unlock() } } // MARK: - From 82945ef8cf3516d0205dc15cff33a447d38e2a50 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 18:55:57 -0800 Subject: [PATCH 012/332] No need for async call since these will always be called from the shared que --- framework/Source/BasicOperation.swift | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 220e0de4..62c4c97e 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -187,12 +187,10 @@ open class BasicOperation: ImageProcessingOperation { } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - sharedImageProcessingContext.runOperationAsynchronously{ - guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } + guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } - renderFramebuffer.lock() - target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) - } + renderFramebuffer.lock() + 
target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) } } From 8d06f0cb4ef20975f6680aa7430f941586c4a2eb Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 19:16:31 -0800 Subject: [PATCH 013/332] revert pr #84, solved by properly locking renderframebuffer of BasicOperation --- framework/Source/Pipeline.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index d4c96f4b..cce38731 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -105,7 +105,7 @@ class WeakImageConsumer { public class TargetContainer:Sequence { var targets = [WeakImageConsumer]() - var count:Int { get { return self.dispatchQueue.sync{return targets.count}}} + var count:Int { get { return targets.count } } #if !os(Linux) let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) #endif From a445aea3ad5138391ad2f86e95bcdfb2136fe981 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 19:34:55 -0800 Subject: [PATCH 014/332] Fix framebuffer overelease warning --- framework/Source/BasicOperation.swift | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 62c4c97e..d09b12d0 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -79,7 +79,12 @@ open class BasicOperation: ImageProcessingOperation { deinit { //debugPrint("Deallocating operation: \(self)") - if(renderFramebuffer != nil) { renderFramebuffer.unlock() } + + // Run on the shared que to prevent unlocking if the framebuffer + // is mid-initialization and has not yet been locked + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + if(self?.renderFramebuffer != nil) { self?.renderFramebuffer.unlock() } + } } // MARK: - From 8bd5795e9c99347a4984b6cf01f1da7e92279a85 
Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 21:04:04 -0800 Subject: [PATCH 015/332] Fix background thread warning without moving to main que --- framework/Source/iOS/RenderView.swift | 22 ++++++---------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index c54d950c..c89bdcbd 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -24,6 +24,8 @@ public class RenderView:UIView, ImageConsumer { return sharedImageProcessingContext.passthroughShader }() + private var internalLayer: CAEAGLLayer! + // TODO: Need to set viewport to appropriate size, resize viewport on view reshape required public init?(coder:NSCoder) { @@ -48,6 +50,8 @@ public class RenderView:UIView, ImageConsumer { let eaglLayer = self.layer as! CAEAGLLayer eaglLayer.isOpaque = true eaglLayer.drawableProperties = [NSNumber(value:false): kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8: kEAGLDrawablePropertyColorFormat] + + self.internalLayer = eaglLayer } deinit { @@ -55,8 +59,6 @@ public class RenderView:UIView, ImageConsumer { } func createDisplayFramebuffer() { - sharedImageProcessingContext.makeCurrentContext() - var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer @@ -67,7 +69,7 @@ public class RenderView:UIView, ImageConsumer { displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! 
CAEAGLLayer) + sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) var backingWidth:GLint = 0 var backingHeight:GLint = 0 @@ -110,20 +112,8 @@ public class RenderView:UIView, ImageConsumer { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if (self.displayFramebuffer == nil) { - DispatchQueue.main.async { - self.createDisplayFramebuffer() - - sharedImageProcessingContext.runOperationAsynchronously { - self.displayFramebuffer(framebuffer, fromSourceIndex: fromSourceIndex) - } - } + self.createDisplayFramebuffer() } - else { - self.displayFramebuffer(framebuffer, fromSourceIndex: fromSourceIndex) - } - } - - public func displayFramebuffer(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { self.activateDisplayFramebuffer() clearFramebufferWithColor(backgroundRenderColor) From 7c4359c3246f4c0bbe4b152971f55255c8a0a511 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 24 Dec 2017 21:34:16 -0800 Subject: [PATCH 016/332] flush before calling renderbufferStorage() --- framework/Source/iOS/RenderView.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index c89bdcbd..de92f5b7 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -69,6 +69,7 @@ public class RenderView:UIView, ImageConsumer { displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) 
+ CATransaction.flush() sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) var backingWidth:GLint = 0 From 3bccaa01baff37409b6dd877defda3f229bd996b Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 25 Dec 2017 01:51:50 -0800 Subject: [PATCH 017/332] Fix self reference --- framework/Source/BasicOperation.swift | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index d09b12d0..8a60efee 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -82,8 +82,10 @@ open class BasicOperation: ImageProcessingOperation { // Run on the shared que to prevent unlocking if the framebuffer // is mid-initialization and has not yet been locked - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in - if(self?.renderFramebuffer != nil) { self?.renderFramebuffer.unlock() } + // Also don't reference self since self will have dealloc'd if this is run async + let renderFramebuffer = self.renderFramebuffer + sharedImageProcessingContext.runOperationAsynchronously { + if(renderFramebuffer != nil) { renderFramebuffer!.unlock() } } } From 2baaf07a09a682f32d341d15ee2f5da19875a96e Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 25 Dec 2017 19:27:40 -0800 Subject: [PATCH 018/332] Improve playback speed of MovieInput, add comments --- framework/Source/iOS/MovieInput.swift | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 41f0bfd2..4fbe3838 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -141,17 +141,26 @@ public class MovieInput: ImageSource { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { // Do this outside of the video processing queue to not slow that down while 
waiting + + // Sample time eg. first frame is 0,30 second frame is 1,30 let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + // This produces the rolling frame rate let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime) - let currentActualTime = CFAbsoluteTimeGetCurrent() - let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) - let actualTimeDifference = currentActualTime - previousActualFrameTime + // Frame duration in seconds, shorten it ever so slightly to speed up playback + let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) - 0.0022 + // Actual time passed since last frame displayed + var actualTimeDifference = CFAbsoluteTimeGetCurrent() - previousActualFrameTime + // If the frame duration is longer than the duration we are actually display them at + // Slow the duration we are actually displaying them at if (frameTimeDifference > actualTimeDifference) { usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference)))) } + //actualTimeDifference = CFAbsoluteTimeGetCurrent() - previousActualFrameTime + //print("frameTime: \(String(format: "%.6f", frameTimeDifference)) actualTime: \(String(format: "%.6f", actualTimeDifference))") + previousFrameTime = currentSampleTime previousActualFrameTime = CFAbsoluteTimeGetCurrent() } From 5cdd099fde1d14cfa6ccbff6c471fd6c628f081b Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 25 Dec 2017 19:28:14 -0800 Subject: [PATCH 019/332] compiler warning --- framework/Source/iOS/MovieInput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 4fbe3838..e8aef629 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -150,7 +150,7 @@ public class MovieInput: ImageSource { // Frame duration in seconds, shorten it ever so slightly to speed up playback let frameTimeDifference = 
CMTimeGetSeconds(differenceFromLastFrame) - 0.0022 // Actual time passed since last frame displayed - var actualTimeDifference = CFAbsoluteTimeGetCurrent() - previousActualFrameTime + let actualTimeDifference = CFAbsoluteTimeGetCurrent() - previousActualFrameTime // If the frame duration is longer than the duration we are actually display them at // Slow the duration we are actually displaying them at From adf1a19ba2774c88377703a276c150924684e0b2 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 26 Dec 2017 12:52:31 -0800 Subject: [PATCH 020/332] Resolve imageinput framebuffer locking issues --- framework/Source/BasicOperation.swift | 1 - framework/Source/iOS/PictureInput.swift | 7 +++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 8a60efee..dcbc814f 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -113,7 +113,6 @@ open class BasicOperation: ImageProcessingOperation { func renderFrame() { if(renderFramebuffer != nil) { renderFramebuffer.unlock() } - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) renderFramebuffer.lock() diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 3c6aedf9..f44338d4 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -94,6 +94,7 @@ public class PictureInput: ImageSource { do { // TODO: Alter orientation based on metadata from photo self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) + self.imageFramebuffer.lock() } catch { fatalError("ERROR: Unable to initialize framebuffer of size 
(\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") } @@ -124,6 +125,12 @@ public class PictureInput: ImageSource { guard let image = UIImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } + + deinit { + //debugPrint("Deallocating operation: \(self)") + + imageFramebuffer.unlock() + } public func processImage(synchronously:Bool = false) { if synchronously { From 5dac9731f6b71e217e85f45a17e1e4d7f89f5660 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 26 Dec 2017 12:59:14 -0800 Subject: [PATCH 021/332] Resolve playback issues where movie will continue playing after deinit --- framework/Source/iOS/MovieInput.swift | 6 ++++++ framework/Source/iOS/RenderView.swift | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index e8aef629..c75d6bc4 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -35,6 +35,10 @@ public class MovieInput: ImageSource { let inputAsset = AVURLAsset(url:url, options:inputOptions) try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop) } + + deinit { + self.cancel() + } // MARK: - // MARK: Playback control @@ -79,6 +83,7 @@ public class MovieInput: ImageSource { DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: { guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } guard let assetReader = self.assetReader else { return } + guard self.started else { return } do { try ObjC.catchException { @@ -168,6 +173,7 @@ public class MovieInput: ImageSource { sharedImageProcessingContext.runOperationSynchronously{ self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) + } } else { if (!loop) { diff 
--git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index de92f5b7..b7d69e4e 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -1,13 +1,13 @@ import UIKit -protocol RenderViewDelegate: class { +public protocol RenderViewDelegate: class { func didDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) } // TODO: Add support for transparency // TODO: Deal with view resizing public class RenderView:UIView, ImageConsumer { - weak var delegate:RenderViewDelegate? + public weak var delegate:RenderViewDelegate? public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio From dcbe197bf5c3458088865bb810af89bc5aa95b17 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 27 Dec 2017 14:54:19 -0800 Subject: [PATCH 022/332] Dispatch PictureIput asynchronously Remove framebuffer relay from basic operation --- framework/Source/BasicOperation.swift | 18 +-- framework/Source/iOS/PictureInput.swift | 152 ++++++++++++------------ 2 files changed, 80 insertions(+), 90 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index dcbc814f..68b337df 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -79,14 +79,6 @@ open class BasicOperation: ImageProcessingOperation { deinit { //debugPrint("Deallocating operation: \(self)") - - // Run on the shared que to prevent unlocking if the framebuffer - // is mid-initialization and has not yet been locked - // Also don't reference self since self will have dealloc'd if this is run async - let renderFramebuffer = self.renderFramebuffer - sharedImageProcessingContext.runOperationAsynchronously { - if(renderFramebuffer != nil) { renderFramebuffer!.unlock() } - } } // MARK: - @@ -112,9 +104,7 @@ open class BasicOperation: ImageProcessingOperation { } func renderFrame() { - if(renderFramebuffer != nil) { 
renderFramebuffer.unlock() } renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) - renderFramebuffer.lock() let textureProperties = initialTextureProperties() configureFramebufferSpecificUniforms(inputFramebuffers[0]!) @@ -193,10 +183,10 @@ open class BasicOperation: ImageProcessingOperation { } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } - - renderFramebuffer.lock() - target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) + //guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } + + //renderFramebuffer.lock() + //target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) } } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index f44338d4..a8ad6ad3 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -7,90 +7,89 @@ public class PictureInput: ImageSource { var hasProcessedImage:Bool = false public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - // TODO: Dispatch this whole thing asynchronously to move image loading off main thread - let widthOfImage = GLint(image.width) - let heightOfImage = GLint(image.height) - - // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. 
- guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } + sharedImageProcessingContext.runOperationAsynchronously{ + let widthOfImage = GLint(image.width) + let heightOfImage = GLint(image.height) + + // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. + guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } - var widthToUseForTexture = widthOfImage - var heightToUseForTexture = heightOfImage - var shouldRedrawUsingCoreGraphics = false - - // For now, deal with images larger than the maximum texture size by resizing to be within that limit - let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) - if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { - widthToUseForTexture = scaledImageSizeToFitOnGPU.width - heightToUseForTexture = scaledImageSizeToFitOnGPU.height - shouldRedrawUsingCoreGraphics = true - } - - if (smoothlyScaleOutput) { - // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill - let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) - let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) + var widthToUseForTexture = widthOfImage + var heightToUseForTexture = heightOfImage + var shouldRedrawUsingCoreGraphics = false - widthToUseForTexture = GLint(round(pow(2.0, powerClosestToWidth))) - heightToUseForTexture = GLint(round(pow(2.0, powerClosestToHeight))) - shouldRedrawUsingCoreGraphics = true - } - - var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFData! 
- var format = GL_BGRA - - if (!shouldRedrawUsingCoreGraphics) { - /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to - * tell GL about the memory layout with GLES. - */ - if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) - { + // For now, deal with images larger than the maximum texture size by resizing to be within that limit + let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) + if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { + widthToUseForTexture = scaledImageSizeToFitOnGPU.width + heightToUseForTexture = scaledImageSizeToFitOnGPU.height shouldRedrawUsingCoreGraphics = true - } else { - /* Check that the bitmap pixel format is compatible with GL */ - let bitmapInfo = image.bitmapInfo - if (bitmapInfo.contains(.floatComponents)) { - /* We don't support float components for use directly in GL */ + } + + if (smoothlyScaleOutput) { + // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill + let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) + let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) + + widthToUseForTexture = GLint(round(pow(2.0, powerClosestToWidth))) + heightToUseForTexture = GLint(round(pow(2.0, powerClosestToHeight))) + shouldRedrawUsingCoreGraphics = true + } + + var imageData:UnsafeMutablePointer! + var dataFromImageDataProvider:CFData! + var format = GL_BGRA + + if (!shouldRedrawUsingCoreGraphics) { + /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to + * tell GL about the memory layout with GLES. 
+ */ + if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) + { shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) - if (bitmapInfo.contains(.byteOrder32Little)) { - /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { - shouldRedrawUsingCoreGraphics = true - } - } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { - /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { - shouldRedrawUsingCoreGraphics = true - } else { - /* Can access directly using GL_RGBA pixel format */ - format = GL_RGBA + /* Check that the bitmap pixel format is compatible with GL */ + let bitmapInfo = image.bitmapInfo + if (bitmapInfo.contains(.floatComponents)) { + /* We don't support float components for use directly in GL */ + shouldRedrawUsingCoreGraphics = true + } else { + let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if (bitmapInfo.contains(.byteOrder32Little)) { + /* Little endian, for alpha-first we can use this bitmap directly in GL */ + if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { + shouldRedrawUsingCoreGraphics = true + } + } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { + /* Big endian, for alpha-last we can use this bitmap directly in GL */ + if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != 
CGImageAlphaInfo.noneSkipLast)) { + shouldRedrawUsingCoreGraphics = true + } else { + /* Can access directly using GL_RGBA pixel format */ + format = GL_RGBA + } } } } } - } - - // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); - - if (shouldRedrawUsingCoreGraphics) { - // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) - - let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) - // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) - } else { - // Access the raw image bytes directly - dataFromImageDataProvider = image.dataProvider?.data - imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) - } + // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); + + if (shouldRedrawUsingCoreGraphics) { + // For resized or incompatible image: redraw + imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + + let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() + + let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) + // CGContextSetBlendMode(imageContext, 
kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html + imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) + } else { + // Access the raw image bytes directly + dataFromImageDataProvider = image.dataProvider?.data + imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) + } - sharedImageProcessingContext.runOperationSynchronously{ do { // TODO: Alter orientation based on metadata from photo self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) @@ -110,11 +109,12 @@ public class PictureInput: ImageSource { glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) - } - if (shouldRedrawUsingCoreGraphics) { - imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + if (shouldRedrawUsingCoreGraphics) { + imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + } } + } public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { From baded58d6ebb995488b2a51f3e3fca1f083addb2 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 27 Dec 2017 19:10:47 -0800 Subject: [PATCH 023/332] Resolve pipline target multi threading issues --- framework/Source/Pipeline.swift | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index cce38731..f96254d7 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -65,16 +65,21 @@ public extension ImageSource { } public func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) { - if targets.count == 0 { // Deal with the case where no targets are attached by immediately returning 
framebuffer to cache + var foundTargets = [(ImageConsumer, UInt)]() + for target in targets { + foundTargets.append(target) + } + + if foundTargets.count == 0 { // Deal with the case where no targets are attached by immediately returning framebuffer to cache framebuffer.lock() framebuffer.unlock() } else { // Lock first for each output, to guarantee proper ordering on multi-output operations - for _ in targets { + for _ in foundTargets { framebuffer.lock() } } - for (target, index) in targets { + for (target, index) in foundTargets { target.newFramebufferAvailable(framebuffer, fromSourceIndex:index) } } @@ -104,8 +109,10 @@ class WeakImageConsumer { } public class TargetContainer:Sequence { - var targets = [WeakImageConsumer]() - var count:Int { get { return targets.count } } + private var targets = [WeakImageConsumer]() + + private var count:Int { get { return targets.count } } + #if !os(Linux) let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) #endif From 472e3f71fa4011f7d45f5dab8cb6fc2234a0d20a Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 27 Dec 2017 20:25:09 -0800 Subject: [PATCH 024/332] Improved context switching Revert async imageinput --- framework/Source/Framebuffer.swift | 28 +++-- framework/Source/iOS/MovieOutput.swift | 10 +- framework/Source/iOS/PictureInput.swift | 134 ++++++++++++------------ 3 files changed, 90 insertions(+), 82 deletions(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 5af05417..53a0dc8c 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -57,7 +57,7 @@ public class Framebuffer { let hash:Int64 let textureOverride:Bool - weak var context:OpenGLContext? 
+ unowned var context:OpenGLContext public init(context:OpenGLContext, orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false, overriddenTexture:GLuint? = nil) throws { self.context = context @@ -144,7 +144,7 @@ public class Framebuffer { } public func texturePropertiesForOutputRotation(_ rotation:Rotation) -> InputTextureProperties { - return InputTextureProperties(textureVBO:context!.textureVBO(for:rotation), texture:texture) + return InputTextureProperties(textureVBO:context.textureVBO(for:rotation), texture:texture) } public func texturePropertiesForTargetOrientation(_ targetOrientation:ImageOrientation) -> InputTextureProperties { @@ -163,21 +163,29 @@ public class Framebuffer { weak var cache:FramebufferCache? var framebufferRetainCount = 0 func lock() { - framebufferRetainCount += 1 + context.runOperationSynchronously { + framebufferRetainCount += 1 + } } func resetRetainCount() { - framebufferRetainCount = 0 + context.runOperationSynchronously { + framebufferRetainCount = 0 + } } public func unlock() { - framebufferRetainCount -= 1 - if (framebufferRetainCount < 1) { - if ((framebufferRetainCount < 0) && (cache != nil)) { - print("WARNING: Tried to overrelease a framebuffer") + // Make sure this gets run on the current framebuffer context + // In the event this framebuffer is used on a different context + context.runOperationSynchronously { + framebufferRetainCount -= 1 + if (framebufferRetainCount < 1) { + if ((framebufferRetainCount < 0) && (cache != nil)) { + print("WARNING: Tried to overrelease a framebuffer") + } + framebufferRetainCount = 0 + cache?.returnToCache(self) } - framebufferRetainCount = 0 - cache?.returnToCache(self) } } } diff --git a/framework/Source/iOS/MovieOutput.swift 
b/framework/Source/iOS/MovieOutput.swift index a79403d9..6be0186e 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -31,10 +31,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var pixelBuffer:CVPixelBuffer? = nil var renderFramebuffer:Framebuffer! - let movieProcessingContext: OpenGLContext + let movieProcessingContext:OpenGLContext public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup + // Since we cannot access self before calling super, initialize here and not above let movieProcessingContext = OpenGLContext() if movieProcessingContext.supportsTextureCaches() { @@ -232,11 +233,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if !self.movieProcessingContext.supportsTextureCaches() { self.pixelBuffer = nil } - - sharedImageProcessingContext.runOperationAsynchronously { - //Must be called from the context it came from - framebuffer.unlock() - } + + framebuffer.unlock() } } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index a8ad6ad3..eea6d4d1 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -7,78 +7,78 @@ public class PictureInput: ImageSource { var hasProcessedImage:Bool = false public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - sharedImageProcessingContext.runOperationAsynchronously{ - let widthOfImage = GLint(image.width) - let heightOfImage = GLint(image.height) - - // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. 
- guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } - - var widthToUseForTexture = widthOfImage - var heightToUseForTexture = heightOfImage - var shouldRedrawUsingCoreGraphics = false - - // For now, deal with images larger than the maximum texture size by resizing to be within that limit - let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) - if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { - widthToUseForTexture = scaledImageSizeToFitOnGPU.width - heightToUseForTexture = scaledImageSizeToFitOnGPU.height - shouldRedrawUsingCoreGraphics = true - } + let widthOfImage = GLint(image.width) + let heightOfImage = GLint(image.height) + + // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. + guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } + + var widthToUseForTexture = widthOfImage + var heightToUseForTexture = heightOfImage + var shouldRedrawUsingCoreGraphics = false + + // For now, deal with images larger than the maximum texture size by resizing to be within that limit + let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) + if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { + widthToUseForTexture = scaledImageSizeToFitOnGPU.width + heightToUseForTexture = scaledImageSizeToFitOnGPU.height + shouldRedrawUsingCoreGraphics = true + } + + if (smoothlyScaleOutput) { + // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill + let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) + 
let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) - if (smoothlyScaleOutput) { - // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill - let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) - let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) - - widthToUseForTexture = GLint(round(pow(2.0, powerClosestToWidth))) - heightToUseForTexture = GLint(round(pow(2.0, powerClosestToHeight))) + widthToUseForTexture = GLint(round(pow(2.0, powerClosestToWidth))) + heightToUseForTexture = GLint(round(pow(2.0, powerClosestToHeight))) + shouldRedrawUsingCoreGraphics = true + } + + var imageData:UnsafeMutablePointer! + var dataFromImageDataProvider:CFData! + var format = GL_BGRA + + if (!shouldRedrawUsingCoreGraphics) { + /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to + * tell GL about the memory layout with GLES. + */ + if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) + { shouldRedrawUsingCoreGraphics = true - } - - var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFData! - var format = GL_BGRA - - if (!shouldRedrawUsingCoreGraphics) { - /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to - * tell GL about the memory layout with GLES. 
- */ - if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) - { + } else { + /* Check that the bitmap pixel format is compatible with GL */ + let bitmapInfo = image.bitmapInfo + if (bitmapInfo.contains(.floatComponents)) { + /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - /* Check that the bitmap pixel format is compatible with GL */ - let bitmapInfo = image.bitmapInfo - if (bitmapInfo.contains(.floatComponents)) { - /* We don't support float components for use directly in GL */ - shouldRedrawUsingCoreGraphics = true - } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) - if (bitmapInfo.contains(.byteOrder32Little)) { - /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { - shouldRedrawUsingCoreGraphics = true - } - } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { - /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { - shouldRedrawUsingCoreGraphics = true - } else { - /* Can access directly using GL_RGBA pixel format */ - format = GL_RGBA - } + let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if (bitmapInfo.contains(.byteOrder32Little)) { + /* Little endian, for alpha-first we can use this bitmap directly in GL */ + if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { + shouldRedrawUsingCoreGraphics = true + } + } else if ((bitmapInfo.contains(CGBitmapInfo())) || 
(bitmapInfo.contains(.byteOrder32Big))) { + /* Big endian, for alpha-last we can use this bitmap directly in GL */ + if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { + shouldRedrawUsingCoreGraphics = true + } else { + /* Can access directly using GL_RGBA pixel format */ + format = GL_RGBA } } } } - + } + + sharedImageProcessingContext.runOperationSynchronously{ // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); if (shouldRedrawUsingCoreGraphics) { // For resized or incompatible image: redraw imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) - + let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) @@ -89,7 +89,7 @@ public class PictureInput: ImageSource { dataFromImageDataProvider = image.dataProvider?.data imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) } - + do { // TODO: Alter orientation based on metadata from photo self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) @@ -109,18 +109,20 @@ public class PictureInput: ImageSource { glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) + + + } - if (shouldRedrawUsingCoreGraphics) { - imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) - } + if (shouldRedrawUsingCoreGraphics) { + imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) } - + } public convenience 
init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } - + public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { guard let image = UIImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) @@ -131,7 +133,7 @@ public class PictureInput: ImageSource { imageFramebuffer.unlock() } - + public func processImage(synchronously:Bool = false) { if synchronously { sharedImageProcessingContext.runOperationSynchronously{ From e9fc72feb2688eb96dfeaa080a21549d1c550257 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 28 Dec 2017 02:34:32 -0800 Subject: [PATCH 025/332] Use mach_wait_until for precise playback timing Playback speed is more acurate and resolves issue where 60fps video begins to slow down significantly after watching for an extended time (5-10 mins). 
In addition 60fps video is more smooth --- framework/Source/iOS/MovieInput.swift | 153 +++++++++++++++++--------- 1 file changed, 101 insertions(+), 52 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index c75d6bc4..971a5662 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -11,9 +11,7 @@ public class MovieInput: ImageSource { var started = false let playAtActualSpeed:Bool public var loop:Bool - var videoEncodingIsFinished = false var previousFrameTime = kCMTimeZero - var previousActualFrameTime = CFAbsoluteTimeGetCurrent() var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 @@ -80,7 +78,7 @@ public class MovieInput: ImageSource { self.started = true asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ - DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: { + DispatchQueue.global().async(qos: .background) { guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } guard let assetReader = self.assetReader else { return } guard self.started else { return } @@ -98,29 +96,8 @@ public class MovieInput: ImageSource { return } - var readerVideoTrackOutput:AVAssetReaderOutput? = nil; - - for output in assetReader.outputs { - if(output.mediaType == AVMediaTypeVideo) { - readerVideoTrackOutput = output; - } - } - - while (assetReader.status == .reading) { - self.readNextVideoFrame(from:readerVideoTrackOutput!) 
- } - - if (assetReader.status == .completed) { - assetReader.cancelReading() - - if (self.loop) { - self.endProcessing() - self.start() - } else { - self.endProcessing() - } - } - }) + Thread.detachNewThreadSelector(#selector(self.beginReading), toTarget: self, with: nil) + } }) } @@ -139,47 +116,73 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Internal processing functions + @objc func beginReading() { + guard let assetReader = self.assetReader else { return } + + var readerVideoTrackOutput:AVAssetReaderOutput? = nil; + + for output in assetReader.outputs { + if(output.mediaType == AVMediaTypeVideo) { + readerVideoTrackOutput = output; + } + } + + self.configureThread() + + while(assetReader.status == .reading) { + self.readNextVideoFrame(from:readerVideoTrackOutput!) + } + + if (assetReader.status == .completed) { + assetReader.cancelReading() + + if (self.loop) { + self.endProcessing() + self.start() + } else { + self.endProcessing() + } + } + } + func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { guard let assetReader = self.assetReader else { return } - if ((assetReader.status == .reading) && !videoEncodingIsFinished) { + let renderStart = DispatchTime.now() + var frameDurationNanos: Float64 = 0 + + if (assetReader.status == .reading) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { // Do this outside of the video processing queue to not slow that down while waiting // Sample time eg. 
first frame is 0,30 second frame is 1,30 let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - // This produces the rolling frame rate - let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime) - - // Frame duration in seconds, shorten it ever so slightly to speed up playback - let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) - 0.0022 - // Actual time passed since last frame displayed - let actualTimeDifference = CFAbsoluteTimeGetCurrent() - previousActualFrameTime - // If the frame duration is longer than the duration we are actually display them at - // Slow the duration we are actually displaying them at - if (frameTimeDifference > actualTimeDifference) { - usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference)))) - } - - //actualTimeDifference = CFAbsoluteTimeGetCurrent() - previousActualFrameTime - //print("frameTime: \(String(format: "%.6f", frameTimeDifference)) actualTime: \(String(format: "%.6f", actualTimeDifference))") + // Retrieve the rolling frame rate (duration between each frame) + let frameDuration = CMTimeSubtract(currentSampleTime, previousFrameTime) + frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 - previousFrameTime = currentSampleTime - previousActualFrameTime = CFAbsoluteTimeGetCurrent() + self.previousFrameTime = currentSampleTime } - + sharedImageProcessingContext.runOperationSynchronously{ self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) - } - } else { - if (!loop) { - videoEncodingIsFinished = true - if (videoEncodingIsFinished) { - self.endProcessing() + + if(playAtActualSpeed) { + let renderEnd = DispatchTime.now() + + // Find the amount of time it took to display the last frame in microseconds + let renderDurationNanos = Double(renderEnd.uptimeNanoseconds - renderStart.uptimeNanoseconds) + + // Find how much time we should wait to display the next frame. 
So it would be the frame duration minus the + // amount of time we already spent rendering the current frame. + let waitDurationNanos = Int(frameDurationNanos - renderDurationNanos) + + if(waitDurationNanos > 0) { + mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(waitDurationNanos))) } } } @@ -189,7 +192,7 @@ public class MovieInput: ImageSource { // self.endProcessing() // } // } - + } func process(movieFrame frame:CMSampleBuffer) { @@ -296,4 +299,50 @@ public class MovieInput: ImageSource { public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } + + // MARK: - + // MARK: Thread configuration + + var timebaseInfo = mach_timebase_info_data_t() + + func configureThread() { + mach_timebase_info(&timebaseInfo) + let clock2abs = Double(timebaseInfo.denom) / Double(timebaseInfo.numer) * Double(NSEC_PER_MSEC) + + let period = UInt32(0.00 * clock2abs) + // Setup for 30 milliseconds of work + // The anticpated render duration is in the 10-30 ms range on an iPhone 6 for 1080p video with no filters + // If the computation value is set too high, setting the thread policy will fail + let computation = UInt32(30 * clock2abs) + // With filters the upper bound is unlimited but with a lot of approximation it falls in the 20-100 ms range with 1080p video + // If we surpass our constraint the computation is scheduled for a later point + // You can test this by setting a low constraint and then applying a filter + let constraint = UInt32(100 * clock2abs) + + let THREAD_TIME_CONSTRAINT_POLICY_COUNT = mach_msg_type_number_t(MemoryLayout.size / MemoryLayout.size) + + var policy = thread_time_constraint_policy() + var ret: Int32 + let thread: thread_port_t = pthread_mach_thread_np(pthread_self()) + + policy.period = period + policy.computation = computation + policy.constraint = constraint + policy.preemptible = 0 + + ret = withUnsafeMutablePointer(to: &policy) { + $0.withMemoryRebound(to: integer_t.self, capacity: 
Int(THREAD_TIME_CONSTRAINT_POLICY_COUNT)) { + thread_policy_set(thread, UInt32(THREAD_TIME_CONSTRAINT_POLICY), $0, THREAD_TIME_CONSTRAINT_POLICY_COUNT) + } + } + + if ret != KERN_SUCCESS { + mach_error("thread_policy_set:", ret) + fatalError("Unable to configure thread") + } + } + + func nanosToAbs(_ nanos: UInt64) -> UInt64 { + return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer) + } } From 8663108b68d2d12d1603b72b4ce3f78403adb612 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 4 Jan 2018 23:59:22 -0800 Subject: [PATCH 026/332] Fix cpu slowdown for movieplayback Remove unnecessary sync statements Fix drawable properties of renderview --- framework/Source/Framebuffer.swift | 24 ++--- framework/Source/FramebufferCache.swift | 4 +- framework/Source/iOS/MovieInput.swift | 132 ++++++++++++----------- framework/Source/iOS/OpenGLContext.swift | 2 +- framework/Source/iOS/RenderView.swift | 2 +- 5 files changed, 81 insertions(+), 83 deletions(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 53a0dc8c..4cc13266 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -163,29 +163,21 @@ public class Framebuffer { weak var cache:FramebufferCache? 
var framebufferRetainCount = 0 func lock() { - context.runOperationSynchronously { - framebufferRetainCount += 1 - } + framebufferRetainCount += 1 } func resetRetainCount() { - context.runOperationSynchronously { - framebufferRetainCount = 0 - } + framebufferRetainCount = 0 } public func unlock() { - // Make sure this gets run on the current framebuffer context - // In the event this framebuffer is used on a different context - context.runOperationSynchronously { - framebufferRetainCount -= 1 - if (framebufferRetainCount < 1) { - if ((framebufferRetainCount < 0) && (cache != nil)) { - print("WARNING: Tried to overrelease a framebuffer") - } - framebufferRetainCount = 0 - cache?.returnToCache(self) + framebufferRetainCount -= 1 + if (framebufferRetainCount < 1) { + if ((framebufferRetainCount < 0) && (cache != nil)) { + print("WARNING: Tried to overrelease a framebuffer") } + framebufferRetainCount = 0 + cache?.returnToCache(self) } } } diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 7b905d89..265c7903 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -26,7 +26,7 @@ public class FramebufferCache { let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer if ((framebufferCache[hash]?.count ?? 
-1) > 0) { -// print("Restoring previous framebuffer") + //print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeLast() framebuffer.orientation = orientation } else { @@ -47,7 +47,7 @@ public class FramebufferCache { } func returnToCache(_ framebuffer:Framebuffer) { -// print("Returning to cache: \(framebuffer)") + //sprint("Returning to cache: \(framebuffer)") context.runOperationSynchronously{ if (self.framebufferCache[framebuffer.hash] != nil) { self.framebufferCache[framebuffer.hash]!.append(framebuffer) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 971a5662..b9f7e341 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -58,6 +58,7 @@ public class MovieInput: ImageSource { else { let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: AVMediaTypeVideo), videoSettings: outputSettings) readerVideoTrackOutput.videoComposition = self.videoComposition + readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader!.add(readerVideoTrackOutput) } @@ -103,16 +104,15 @@ public class MovieInput: ImageSource { public func cancel() { if let assetReader = self.assetReader { + // Make sure this is called before cancelling the reader + // If you don't and this is a looping video it will just start the video over again + self.started = false + assetReader.cancelReading() - self.endProcessing() + self.assetReader = nil } } - func endProcessing() { - self.assetReader = nil - self.started = false - } - // MARK: - // MARK: Internal processing functions @@ -133,57 +133,56 @@ public class MovieInput: ImageSource { self.readNextVideoFrame(from:readerVideoTrackOutput!) 
} - if (assetReader.status == .completed) { - assetReader.cancelReading() - - if (self.loop) { - self.endProcessing() - self.start() - } else { - self.endProcessing() - } + // Make sure the video is still playing and has not been cancelled by user + if (self.loop && self.started) { + self.cancel() + self.start() + } else { + self.cancel() } } func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { guard let assetReader = self.assetReader else { return } - - let renderStart = DispatchTime.now() - var frameDurationNanos: Float64 = 0 - - if (assetReader.status == .reading) { - if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { - if (playAtActualSpeed) { - // Do this outside of the video processing queue to not slow that down while waiting - - // Sample time eg. first frame is 0,30 second frame is 1,30 - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - - // Retrieve the rolling frame rate (duration between each frame) - let frameDuration = CMTimeSubtract(currentSampleTime, previousFrameTime) - frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 - - self.previousFrameTime = currentSampleTime - } + + if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { + + let renderStart = DispatchTime.now() + var frameDurationNanos: Float64 = 0 + + if (playAtActualSpeed) { + // Sample time eg. 
first frame is 0,30 second frame is 1,30 + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - sharedImageProcessingContext.runOperationSynchronously{ - self.process(movieFrame:sampleBuffer) - CMSampleBufferInvalidate(sampleBuffer) - } + // Retrieve the rolling frame rate (duration between each frame) + let frameDuration = CMTimeSubtract(currentSampleTime, previousFrameTime) + frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 - if(playAtActualSpeed) { - let renderEnd = DispatchTime.now() - - // Find the amount of time it took to display the last frame in microseconds - let renderDurationNanos = Double(renderEnd.uptimeNanoseconds - renderStart.uptimeNanoseconds) - - // Find how much time we should wait to display the next frame. So it would be the frame duration minus the - // amount of time we already spent rendering the current frame. - let waitDurationNanos = Int(frameDurationNanos - renderDurationNanos) - - if(waitDurationNanos > 0) { - mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(waitDurationNanos))) - } + self.previousFrameTime = currentSampleTime + } + + sharedImageProcessingContext.runOperationSynchronously{ + self.process(movieFrame:sampleBuffer) + CMSampleBufferInvalidate(sampleBuffer) + } + + if(playAtActualSpeed) { + let renderEnd = DispatchTime.now() + + // Find the amount of time it took to display the last frame + let renderDurationNanos = Double(renderEnd.uptimeNanoseconds - renderStart.uptimeNanoseconds) + + // Find how much time we should wait to display the next frame. So it would be the frame duration minus the + // amount of time we already spent rendering the current frame. 
+ let waitDurationNanos = Int(frameDurationNanos - renderDurationNanos) + + // When the wait duration begins returning negative values consistently + // It means the OS is unable to provide enough processing time for the above work + // and that you need to adjust the real time thread policy below + //print("Render duration: \(String(format: "%.4f",renderDurationNanos / 1_000_000)) ms Wait duration: \(String(format: "%.4f",Double(waitDurationNanos) / 1_000_000)) ms") + + if(waitDurationNanos > 0) { + mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(waitDurationNanos))) } } } @@ -203,6 +202,7 @@ public class MovieInput: ImageSource { self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } + //Code from pull request https://github.com/BradLarson/GPUImage2/pull/183 func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) @@ -248,7 +248,6 @@ public class MovieInput: ImageSource { return } - luminanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache luminanceFramebuffer.lock() var chrominanceGLTexture: CVOpenGLESTexture? 
@@ -276,7 +275,6 @@ public class MovieInput: ImageSource { return } - chrominanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache chrominanceFramebuffer.lock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) @@ -295,7 +293,7 @@ public class MovieInput: ImageSource { print("Current frame time : \(1000.0 * currentFrameTime) ms") } } - + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } @@ -309,15 +307,23 @@ public class MovieInput: ImageSource { mach_timebase_info(&timebaseInfo) let clock2abs = Double(timebaseInfo.denom) / Double(timebaseInfo.numer) * Double(NSEC_PER_MSEC) - let period = UInt32(0.00 * clock2abs) - // Setup for 30 milliseconds of work - // The anticpated render duration is in the 10-30 ms range on an iPhone 6 for 1080p video with no filters - // If the computation value is set too high, setting the thread policy will fail - let computation = UInt32(30 * clock2abs) - // With filters the upper bound is unlimited but with a lot of approximation it falls in the 20-100 ms range with 1080p video - // If we surpass our constraint the computation is scheduled for a later point - // You can test this by setting a low constraint and then applying a filter - let constraint = UInt32(100 * clock2abs) + // http://docs.huihoo.com/darwin/kernel-programming-guide/scheduler/chapter_8_section_4.html + // + // To see the impact of adjusting these values, uncomment the print statement above mach_wait_until() in self.readNextVideoFrame() + // + // Setup for 5 ms of work. 
+ // The anticpated frame render duration is in the 1-3 ms range on an iPhone 6 for 1080p without filters and 1-7 ms range with filters + // If the render duration is allowed to exceed 16ms (the duration of a frame in 60fps video) + // the 60fps video will no longer be playing in real time. + // If this computation value is set too high, setting the thread policy will fail. + let computation = UInt32(5 * clock2abs) + // Tell the scheduler the next 20 ms of work needs to be done as soon as possible. + let period = UInt32(0 * clock2abs) + // According to the above scheduling chapter this constraint only appears relevant + // if preemtible is set to true and the period is not 0. If this is wrong, please let me know. + let constraint = UInt32(5 * clock2abs) + + //print("period: \(period) computation: \(computation) constraint: \(constraint)") let THREAD_TIME_CONSTRAINT_POLICY_COUNT = mach_msg_type_number_t(MemoryLayout.size / MemoryLayout.size) @@ -328,7 +334,7 @@ public class MovieInput: ImageSource { policy.period = period policy.computation = computation policy.constraint = constraint - policy.preemptible = 0 + policy.preemptible = 0 // Very important, otherwise video will slow down over time. 
ret = withUnsafeMutablePointer(to: &policy) { $0.withMemoryRebound(to: integer_t.self, capacity: Int(THREAD_TIME_CONSTRAINT_POLICY_COUNT)) { diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index 64975cc6..b8744496 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -27,7 +27,7 @@ public class OpenGLContext: SerialDispatch { }() - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", qos: .userInteractive) public let dispatchQueueKey = DispatchSpecificKey() public let dispatchQueueKeyValue: Int diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index b7d69e4e..ed3879f4 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -49,7 +49,7 @@ public class RenderView:UIView, ImageConsumer { let eaglLayer = self.layer as! CAEAGLLayer eaglLayer.isOpaque = true - eaglLayer.drawableProperties = [NSNumber(value:false): kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8: kEAGLDrawablePropertyColorFormat] + eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value:false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] self.internalLayer = eaglLayer } From 966bcdecdaa2299536a2140340474211197c5a1d Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 8 Jan 2018 00:37:25 -0800 Subject: [PATCH 027/332] Resolve render view distortion issue Resolves an issue where the contents of the renderview would be forever distorted if it recieved framebuffers before the view finished being layed out. 
Also resolve hard crashes if the render view recieved framebuffers before appearing --- framework/Source/iOS/RenderView.swift | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index ed3879f4..a2b86af1 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -44,6 +44,16 @@ public class RenderView:UIView, ImageConsumer { } } + override public var bounds: CGRect { + didSet { + // Check if the size changed + if(oldValue.size != self.bounds.size) { + // Destroy the displayFramebuffer so we render at the correct size + self.destroyDisplayFramebuffer() + } + } + } + func commonInit() { self.contentScaleFactor = UIScreen.main.scale @@ -58,7 +68,7 @@ public class RenderView:UIView, ImageConsumer { destroyDisplayFramebuffer() } - func createDisplayFramebuffer() { + func createDisplayFramebuffer() -> Bool { var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer @@ -79,15 +89,19 @@ public class RenderView:UIView, ImageConsumer { backingSize = GLSize(width:backingWidth, height:backingHeight) guard ((backingWidth > 0) && (backingHeight > 0)) else { - fatalError("View had a zero size") + print("Warning: View had a zero size") + return false } glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), displayRenderbuffer!) 
let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - fatalError("Display framebuffer creation failed with error: \(FramebufferCreationError(errorCode:status))") + print("Warning: Display framebuffer creation failed with error: \(FramebufferCreationError(errorCode:status))") + return false } + + return true } func destroyDisplayFramebuffer() { @@ -112,8 +126,9 @@ public class RenderView:UIView, ImageConsumer { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - if (self.displayFramebuffer == nil) { - self.createDisplayFramebuffer() + if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { + // Bail if we couldn't successfully create the displayFramebuffer + return } self.activateDisplayFramebuffer() From a9a610ef9d22a6355b7f20e57d880350b4eae23e Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 24 Jan 2018 17:05:14 -0800 Subject: [PATCH 028/332] Refactored movieinput, include sample times on framebuffers from camera --- framework/Source/BasicOperation.swift | 3 + framework/Source/Framebuffer.swift | 3 + framework/Source/FramebufferCache.swift | 1 + framework/Source/Pipeline.swift | 2 +- framework/Source/iOS/MovieInput.swift | 106 +++++++++++------------- framework/Source/iOS/MovieOutput.swift | 2 + framework/Source/iOS/RenderView.swift | 2 +- 7 files changed, 60 insertions(+), 59 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 68b337df..6c058f34 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -99,6 +99,9 @@ open class BasicOperation: ImageProcessingOperation { if (UInt(inputFramebuffers.count) >= maximumInputs) { renderFrame() + // Carry this over if we have it + outputFramebuffer.sampleTime = framebuffer.sampleTime + updateTargetsWithFramebuffer(outputFramebuffer) } } diff --git a/framework/Source/Framebuffer.swift 
b/framework/Source/Framebuffer.swift index 4cc13266..6bfbcdc2 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -15,6 +15,7 @@ import Glibc #endif import Foundation +import AVFoundation // TODO: Add a good lookup table to this to allow for detailed error messages struct FramebufferCreationError:Error { @@ -59,6 +60,8 @@ public class Framebuffer { unowned var context:OpenGLContext + public var sampleTime: CMTime? + public init(context:OpenGLContext, orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false, overriddenTexture:GLuint? = nil) throws { self.context = context self.size = size diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 265c7903..178240ea 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -29,6 +29,7 @@ public class FramebufferCache { //print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeLast() framebuffer.orientation = orientation + framebuffer.sampleTime = nil } else { do { //debugPrint("Generating new framebuffer at size: \(size)") diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index f96254d7..911317e1 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -191,7 +191,7 @@ public class TargetContainer:Sequence { } public class SourceContainer { - var sources:[UInt:ImageSource] = [:] + public var sources:[UInt:ImageSource] = [:] public init() { } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index b9f7e341..25c60fc7 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -15,7 +15,8 @@ 
public class MovieInput: ImageSource { var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 - + + // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false) throws { @@ -40,84 +41,76 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control + + @objc public func start() { + guard !self.started else { return } + + self.started = true + + Thread.detachNewThreadSelector(#selector(beginReading), toTarget: self, with: nil) + } + + public func cancel() { + self.started = false + + self.assetReader?.cancelReading() + } - public func createReader() -> AVAssetReader? + // MARK: - + // MARK: Internal processing functions + + func createReader() { - var assetReader: AVAssetReader? + self.assetReader = nil + do { let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - assetReader = try AVAssetReader.init(asset: self.asset) + let assetReader = try AVAssetReader.init(asset: self.asset) if(self.videoComposition == nil) { let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader!.add(readerVideoTrackOutput) + assetReader.add(readerVideoTrackOutput) } else { let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: AVMediaTypeVideo), videoSettings: outputSettings) readerVideoTrackOutput.videoComposition = self.videoComposition readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader!.add(readerVideoTrackOutput) + assetReader.add(readerVideoTrackOutput) } - + + self.assetReader = assetReader } catch { print("Could not create 
asset reader: \(error)") } - - return assetReader } - - - public func start() { - if(self.started) { return } + + @objc func beginReading() { + self.configureThread() - if(assetReader == nil) { assetReader = createReader() } - if(assetReader == nil) { return } + self.createReader() - self.started = true + guard let assetReader = self.assetReader else { + self.started = false + return + } - asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ - DispatchQueue.global().async(qos: .background) { - guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } - guard let assetReader = self.assetReader else { return } - guard self.started else { return } - - do { - try ObjC.catchException { - guard assetReader.startReading() else { - print("Couldn't start reading: \(assetReader.error)") - return - } - } - } - catch { - print("Couldn't start reading: \(error)") + do { + try ObjC.catchException { + guard assetReader.startReading() else { + print("Couldn't start reading: \(assetReader.error)") + self.started = false return } - - Thread.detachNewThreadSelector(#selector(self.beginReading), toTarget: self, with: nil) } - }) - } - - public func cancel() { - if let assetReader = self.assetReader { - // Make sure this is called before cancelling the reader - // If you don't and this is a looping video it will just start the video over again + } + catch { + print("Couldn't start reading: \(error)") self.started = false - - assetReader.cancelReading() - self.assetReader = nil + return } - } - - // MARK: - - // MARK: Internal processing functions - - @objc func beginReading() { - guard let assetReader = self.assetReader else { return } var readerVideoTrackOutput:AVAssetReaderOutput? = nil; @@ -127,13 +120,12 @@ public class MovieInput: ImageSource { } } - self.configureThread() - while(assetReader.status == .reading) { self.readNextVideoFrame(from:readerVideoTrackOutput!) 
} // Make sure the video is still playing and has not been cancelled by user + // and also that we didn't only want one frame if (self.loop && self.started) { self.cancel() self.start() @@ -150,7 +142,7 @@ public class MovieInput: ImageSource { let renderStart = DispatchTime.now() var frameDurationNanos: Float64 = 0 - if (playAtActualSpeed) { + if (self.playAtActualSpeed) { // Sample time eg. first frame is 0,30 second frame is 1,30 let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) @@ -166,7 +158,7 @@ public class MovieInput: ImageSource { CMSampleBufferInvalidate(sampleBuffer) } - if(playAtActualSpeed) { + if(self.playAtActualSpeed) { let renderEnd = DispatchTime.now() // Find the amount of time it took to display the last frame @@ -278,6 +270,7 @@ public class MovieInput: ImageSource { chrominanceFramebuffer.lock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + movieFramebuffer.sampleTime = withSampleTime convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) @@ -315,7 +308,6 @@ public class MovieInput: ImageSource { // The anticpated frame render duration is in the 1-3 ms range on an iPhone 6 for 1080p without filters and 1-7 ms range with filters // If the render duration is allowed to exceed 16ms (the duration of a frame in 60fps video) // the 60fps video will no longer be playing in real time. - // If this computation value is set too high, setting the thread policy will fail. let computation = UInt32(5 * clock2abs) // Tell the scheduler the next 20 ms of work needs to be done as soon as possible. 
let period = UInt32(0 * clock2abs) @@ -334,7 +326,7 @@ public class MovieInput: ImageSource { policy.period = period policy.computation = computation policy.constraint = constraint - policy.preemptible = 0 // Very important, otherwise video will slow down over time. + policy.preemptible = 0 ret = withUnsafeMutablePointer(to: &policy) { $0.withMemoryRebound(to: integer_t.self, capacity: Int(THREAD_TIME_CONSTRAINT_POLICY_COUNT)) { diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 6be0186e..1ffcefc4 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -212,6 +212,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.startTime = frameTime self.firstFrameTime = frameTime } + + self.previousFrameTime = frameTime guard (self.assetWriterVideoInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { debugPrint("Had to drop a frame at time \(frameTime)") diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index a2b86af1..20a29e0e 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -136,12 +136,12 @@ public class RenderView:UIView, ImageConsumer { let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:backingSize) renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) - framebuffer.unlock() glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) 
sharedImageProcessingContext.presentBufferForDisplay() self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() } } From 1623dc08f6e77958655ab4e37c9d4e5336ab5aa7 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 28 Jan 2018 16:20:44 -0800 Subject: [PATCH 029/332] Resolve random OpenGL crashes from calling gl functions on main thread in framebuffer deinit In my case the crash would occur when when glClear() was called inside clearFramebufferWithColor() but it could happen anywhere in the opengl code. --- framework/Source/Framebuffer.swift | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 6bfbcdc2..204e3dcd 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -99,18 +99,24 @@ public class Framebuffer { deinit { if (!textureOverride) { var mutableTexture = texture - glDeleteTextures(1, &mutableTexture) + context.runOperationAsynchronously { + glDeleteTextures(1, &mutableTexture) + } //debugPrint("Delete texture at size: \(size)") } if let framebuffer = framebuffer { var mutableFramebuffer = framebuffer - glDeleteFramebuffers(1, &mutableFramebuffer) + context.runOperationAsynchronously { + glDeleteFramebuffers(1, &mutableFramebuffer) + } } if let stencilBuffer = stencilBuffer { var mutableStencil = stencilBuffer - glDeleteRenderbuffers(1, &mutableStencil) + context.runOperationAsynchronously { + glDeleteRenderbuffers(1, &mutableStencil) + } } } From 28d3df60cf5916c447fcc424e305a1dec36ab6e1 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 29 Jan 2018 19:06:06 -0800 Subject: [PATCH 030/332] Crop operation adjusts in real time to changing crop sizes Open up some functions to allow for subclassing --- framework/Source/BasicOperation.swift | 6 +++--- framework/Source/Framebuffer.swift | 2 +- framework/Source/FramebufferCache.swift | 1 + 
.../Source/Operations/AverageColorExtractor.swift | 2 +- .../Operations/AverageLuminanceExtractor.swift | 2 +- framework/Source/Operations/Crop.swift | 12 ++++++++---- framework/Source/Operations/Histogram.swift | 2 +- framework/Source/Operations/MotionBlur.swift | 2 +- framework/Source/Operations/Sharpen.swift | 2 +- framework/Source/Operations/TransformOperation.swift | 4 ++-- framework/Source/TextureSamplingOperation.swift | 2 +- 11 files changed, 21 insertions(+), 16 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 6c058f34..ecd3931c 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -42,7 +42,7 @@ open class BasicOperation: ImageProcessingOperation { public let targets = TargetContainer() public let sources = SourceContainer() var shader:ShaderProgram - var inputFramebuffers = [UInt:Framebuffer]() + public var inputFramebuffers = [UInt:Framebuffer]() var renderFramebuffer:Framebuffer! var outputFramebuffer:Framebuffer { get { return renderFramebuffer } } let usesAspectRatio:Bool @@ -106,7 +106,7 @@ open class BasicOperation: ImageProcessingOperation { } } - func renderFrame() { + open func renderFrame() { renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) let textureProperties = initialTextureProperties() @@ -178,7 +178,7 @@ open class BasicOperation: ImageProcessingOperation { return inputTextureProperties } - func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { if usesAspectRatio { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) uniformSettings["aspectRatio"] = inputFramebuffer.aspectRatioForRotation(outputRotation) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 204e3dcd..ea4b9a2f 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -120,7 +120,7 @@ public class Framebuffer { } } - func sizeForTargetOrientation(_ targetOrientation:ImageOrientation) -> GLSize { + public func sizeForTargetOrientation(_ targetOrientation:ImageOrientation) -> GLSize { if self.orientation.rotationNeededForOrientation(targetOrientation).flipsDimensions() { return GLSize(width:size.height, height:size.width) } else { diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 178240ea..f51152e0 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -25,6 +25,7 @@ public class FramebufferCache { public func requestFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer + if ((framebufferCache[hash]?.count ?? 
-1) > 0) { //print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeLast() diff --git a/framework/Source/Operations/AverageColorExtractor.swift b/framework/Source/Operations/AverageColorExtractor.swift index 1e4911ab..ccf86608 100755 --- a/framework/Source/Operations/AverageColorExtractor.swift +++ b/framework/Source/Operations/AverageColorExtractor.swift @@ -22,7 +22,7 @@ public class AverageColorExtractor: BasicOperation { super.init(vertexShader:AverageColorVertexShader, fragmentShader:AverageColorFragmentShader) } - override func renderFrame() { + override open func renderFrame() { averageColorBySequentialReduction(inputFramebuffer:inputFramebuffers[0]!, shader:shader, extractAverageOperation:extractAverageColorFromFramebuffer) releaseIncomingFramebuffers() } diff --git a/framework/Source/Operations/AverageLuminanceExtractor.swift b/framework/Source/Operations/AverageLuminanceExtractor.swift index 57e22336..a7d4915e 100644 --- a/framework/Source/Operations/AverageLuminanceExtractor.swift +++ b/framework/Source/Operations/AverageLuminanceExtractor.swift @@ -19,7 +19,7 @@ public class AverageLuminanceExtractor: BasicOperation { super.init(vertexShader:AverageColorVertexShader, fragmentShader:AverageLuminanceFragmentShader) } - override func renderFrame() { + override open func renderFrame() { // Reduce to luminance before passing into the downsampling // TODO: Combine this with the first stage of the downsampling by doing reduction here let luminancePassShader = crashOnShaderCompileFailure("AverageLuminance"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:LuminanceFragmentShader)} diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index eb452c53..7cf1c52c 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -1,7 +1,6 @@ -// TODO: Have this adjust in real time to changing crop sizes // TODO: 
Verify at all orientations -public class Crop: BasicOperation { +open class Crop: BasicOperation { public var cropSizeInPixels: Size? public var locationOfCropInPixels: Position? @@ -9,7 +8,7 @@ public class Crop: BasicOperation { super.init(fragmentShader:PassthroughFragmentShader, numberOfInputs:1) } - override func renderFrame() { + override open func renderFrame() { let inputFramebuffer:Framebuffer = inputFramebuffers[0]! let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) @@ -29,7 +28,12 @@ public class Crop: BasicOperation { } let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) + do { + renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: finalCropSize, stencil:false) + } catch { + print("Could not create a framebuffer of the size (\(finalCropSize.width), \(finalCropSize.height)), error: \(error)") + return + } let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index 95274542..6fa49515 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -50,7 +50,7 @@ public class Histogram: BasicOperation { } } - override func renderFrame() { + override open func renderFrame() { let inputSize = sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!) 
let inputByteSize = Int(inputSize.width * inputSize.height * 4) let data = UnsafeMutablePointer.allocate(capacity:inputByteSize) diff --git a/framework/Source/Operations/MotionBlur.swift b/framework/Source/Operations/MotionBlur.swift index 6c76fe29..8b3f0607 100755 --- a/framework/Source/Operations/MotionBlur.swift +++ b/framework/Source/Operations/MotionBlur.swift @@ -12,7 +12,7 @@ public class MotionBlur: BasicOperation { super.init(vertexShader:MotionBlurVertexShader, fragmentShader:MotionBlurFragmentShader, numberOfInputs:1) } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let texelSize = inputFramebuffer.texelSize(for:outputRotation) diff --git a/framework/Source/Operations/Sharpen.swift b/framework/Source/Operations/Sharpen.swift index 3ba518dc..770d579d 100644 --- a/framework/Source/Operations/Sharpen.swift +++ b/framework/Source/Operations/Sharpen.swift @@ -8,7 +8,7 @@ public class Sharpen: BasicOperation { ({sharpness = 0.0})() } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let texelSize = overriddenTexelSize ?? 
inputFramebuffer.texelSize(for:outputRotation) uniformSettings["texelWidth"] = texelSize.width diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index 6b87a377..db5149ec 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -12,7 +12,7 @@ #endif #endif -public class TransformOperation: BasicOperation { +open class TransformOperation: BasicOperation { public var transform:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } var normalizedImageVertices:[GLfloat]! @@ -27,7 +27,7 @@ public class TransformOperation: BasicOperation { releaseIncomingFramebuffers() } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0) diff --git a/framework/Source/TextureSamplingOperation.swift b/framework/Source/TextureSamplingOperation.swift index 19026fd0..60fc3451 100644 --- a/framework/Source/TextureSamplingOperation.swift +++ b/framework/Source/TextureSamplingOperation.swift @@ -5,7 +5,7 @@ open class TextureSamplingOperation: BasicOperation { super.init(vertexShader:vertexShader, fragmentShader:fragmentShader, numberOfInputs:numberOfInputs) } - override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) uniformSettings["texelWidth"] = texelSize.width From 207e3d8d263a11e7b09e6a82e6d5b76d2aa2563b Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 30 Jan 2018 01:20:21 -0800 Subject: [PATCH 031/332] Add support for anchorTopLeft on TransformOperation Add warning for runaway framebuffer cache --- framework/Source/FramebufferCache.swift | 4 ++++ .../Source/Operations/TransformOperation.swift | 16 ++++++++++++---- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index f51152e0..62eb50ae 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -26,6 +26,10 @@ public class FramebufferCache { let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer + if(framebufferCache.count > 20) { + print("Warning: Runaway framebuffer cache with size: \(framebufferCache.count)") + } + if ((framebufferCache[hash]?.count ?? -1) > 0) { //print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeLast() diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index db5149ec..ec26136f 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -14,6 +14,7 @@ open class TransformOperation: BasicOperation { public var transform:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } + public var anchorTopLeft = false var normalizedImageVertices:[GLfloat]! 
public init() { @@ -30,13 +31,20 @@ open class TransformOperation: BasicOperation { override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) - let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0) + let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0, anchorTopLeft:anchorTopLeft) normalizedImageVertices = normalizedImageVerticesForAspectRatio(aspectRatio) uniformSettings["orthographicMatrix"] = orthoMatrix } + + func normalizedImageVerticesForAspectRatio(_ aspectRatio:Float) -> [GLfloat] { + if(anchorTopLeft) { + return [0.0, 0.0, 1.0, 0.0, 0.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + } + else { + return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + } + } } -func normalizedImageVerticesForAspectRatio(_ aspectRatio:Float) -> [GLfloat] { - return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] -} + From 1416ba1355b8a0252bc657f5c245dcf07c5b5c1b Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 30 Jan 2018 12:33:39 -0800 Subject: [PATCH 032/332] Revert change to crop operation --- framework/Source/Operations/Crop.swift | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index 7cf1c52c..5fd8345d 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -28,12 +28,7 @@ open class Crop: BasicOperation { } let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), 
height:Float(finalCropSize.height) / Float(inputSize.height)) - do { - renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: finalCropSize, stencil:false) - } catch { - print("Could not create a framebuffer of the size (\(finalCropSize.width), \(finalCropSize.height)), error: \(error)") - return - } + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) From f0ba5a35b20714bdea18d93fec8bb83bb1578de9 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 30 Jan 2018 12:34:35 -0800 Subject: [PATCH 033/332] Add back missing TODO --- framework/Source/Operations/Crop.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index 5fd8345d..f0113da8 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -1,3 +1,4 @@ +// TODO: Have this adjust in real time to changing crop sizes // TODO: Verify at all orientations open class Crop: BasicOperation { From 8d59d91b9643e209cc92bb56ec77a17c81d1bf77 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 30 Jan 2018 13:27:25 -0800 Subject: [PATCH 034/332] Add ignoreAspectRatio support to TransformOperation --- framework/Source/Operations/TransformOperation.swift | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index ec26136f..e89a7300 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ 
-15,6 +15,7 @@ open class TransformOperation: BasicOperation { public var transform:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } public var anchorTopLeft = false + public var ignoreAspectRatio = false var normalizedImageVertices:[GLfloat]! public init() { @@ -30,7 +31,10 @@ open class TransformOperation: BasicOperation { override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) + var aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) + if(ignoreAspectRatio) { + aspectRatio = 1 + } let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0, anchorTopLeft:anchorTopLeft) normalizedImageVertices = normalizedImageVerticesForAspectRatio(aspectRatio) From 98549b0eabc6bc59f927c92b9011d4ffb52d0703 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Fri, 2 Feb 2018 15:45:11 -0800 Subject: [PATCH 035/332] MovieInput pausing support, improved multithreading support and stability RenderView require first frame to be drawn during next UIKit drawing cycle to prevent distortion issues if view is not yet drawn to screen at correct size (even if bounds/frame is correct) --- framework/Source/iOS/MovieInput.swift | 95 ++++++++++++++------------- framework/Source/iOS/RenderView.swift | 91 +++++++++++++++++++------ 2 files changed, 123 insertions(+), 63 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 25c60fc7..af98f82c 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -6,17 +6,16 @@ public class MovieInput: ImageSource { let yuvConversionShader:ShaderProgram let asset:AVAsset - let videoComposition: AVVideoComposition? 
- var assetReader:AVAssetReader? - var started = false + let videoComposition:AVVideoComposition? let playAtActualSpeed:Bool public var loop:Bool - var previousFrameTime = kCMTimeZero + var previousFrameTime:CMTime? + var currentThread:Thread? + var startFrameTime:CMTime? var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 - // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false) throws { @@ -41,28 +40,37 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control + // Only call these methods from the main thread @objc public func start() { - guard !self.started else { return } - - self.started = true + if let currentThread = self.currentThread, + currentThread.isExecuting, + !currentThread.isCancelled { + // If the current thread is running and has not been cancelled, bail. + return + } + // Just to be safe. + self.currentThread?.cancel() - Thread.detachNewThreadSelector(#selector(beginReading), toTarget: self, with: nil) + self.currentThread = Thread(target: self, selector: #selector(beginReading), object: nil) + self.currentThread?.start() } public func cancel() { - self.started = false - - self.assetReader?.cancelReading() + self.currentThread?.cancel() + self.currentThread = nil + } + + public func pause() { + self.cancel() + self.startFrameTime = self.previousFrameTime } // MARK: - // MARK: Internal processing functions - func createReader() + func createReader() -> AVAssetReader? 
{ - self.assetReader = nil - do { let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] @@ -81,34 +89,37 @@ public class MovieInput: ImageSource { assetReader.add(readerVideoTrackOutput) } - self.assetReader = assetReader + if let startFrameTime = self.startFrameTime { + assetReader.timeRange = CMTimeRange(start: startFrameTime, duration: kCMTimePositiveInfinity) + } + self.startFrameTime = nil + + return assetReader } catch { print("Could not create asset reader: \(error)") } + return nil } @objc func beginReading() { - self.configureThread() + let thread = Thread.current - self.createReader() + self.configureThread() - guard let assetReader = self.assetReader else { - self.started = false - return + guard let assetReader = self.createReader() else { + return // A return statement will end thread execution } do { try ObjC.catchException { guard assetReader.startReading() else { print("Couldn't start reading: \(assetReader.error)") - self.started = false return } } } catch { print("Couldn't start reading: \(error)") - self.started = false return } @@ -116,27 +127,30 @@ public class MovieInput: ImageSource { for output in assetReader.outputs { if(output.mediaType == AVMediaTypeVideo) { - readerVideoTrackOutput = output; + readerVideoTrackOutput = output } } while(assetReader.status == .reading) { - self.readNextVideoFrame(from:readerVideoTrackOutput!) + if(thread.isCancelled) { break } + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) 
} - // Make sure the video is still playing and has not been cancelled by user - // and also that we didn't only want one frame - if (self.loop && self.started) { - self.cancel() - self.start() - } else { - self.cancel() + assetReader.cancelReading() + + // Since only the main thread will cancel threads + // jump onto the main thead to prevent the current thread from being cancelled + // in between the below if statement check and creating the new thread + DispatchQueue.main.async { + // Start the video over so long as it wasn't cancelled + if (self.loop && !thread.isCancelled) { + self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) + self.currentThread?.start() + } } } - func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { - guard let assetReader = self.assetReader else { return } - + func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { let renderStart = DispatchTime.now() @@ -147,7 +161,7 @@ public class MovieInput: ImageSource { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) // Retrieve the rolling frame rate (duration between each frame) - let frameDuration = CMTimeSubtract(currentSampleTime, previousFrameTime) + let frameDuration = CMTimeSubtract(currentSampleTime, self.previousFrameTime ?? kCMTimeZero) frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 self.previousFrameTime = currentSampleTime @@ -178,19 +192,12 @@ public class MovieInput: ImageSource { } } } -// else if (synchronizedMovieWriter != nil) { -// if (assetReader.status == .Completed) { -// self.endProcessing() -// } -// } - } func process(movieFrame frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! 
- -// processingFrameTime = currentSampleTime + self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 20a29e0e..5396ef6b 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -1,11 +1,12 @@ import UIKit public protocol RenderViewDelegate: class { + func willDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) func didDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) + func shouldDisplayNextFramebufferOnMainThread() -> Bool } // TODO: Add support for transparency -// TODO: Deal with view resizing public class RenderView:UIView, ImageConsumer { public weak var delegate:RenderViewDelegate? @@ -26,8 +27,6 @@ public class RenderView:UIView, ImageConsumer { private var internalLayer: CAEAGLLayer! - // TODO: Need to set viewport to appropriate size, resize viewport on view reshape - required public init?(coder:NSCoder) { super.init(coder:coder) self.commonInit() @@ -48,7 +47,7 @@ public class RenderView:UIView, ImageConsumer { didSet { // Check if the size changed if(oldValue.size != self.bounds.size) { - // Destroy the displayFramebuffer so we render at the correct size + // Destroy the displayFramebuffer so we render at the correct size for the next frame self.destroyDisplayFramebuffer() } } @@ -68,7 +67,35 @@ public class RenderView:UIView, ImageConsumer { destroyDisplayFramebuffer() } + var waitingForTransaction = false + func presentWithTransaction() { + if #available(iOS 9.0, *) { + self.internalLayer.presentsWithTransaction = true + self.waitingForTransaction = true + + CATransaction.begin() + CATransaction.setCompletionBlock({ + self.internalLayer.presentsWithTransaction = false + self.waitingForTransaction = false + }) + CATransaction.commit() + } + } + func createDisplayFramebuffer() -> Bool { + // Prevent the first frame from prematurely drawing before the view is drawn 
to the screen at the right size + // Aka we want to briefly synchronize UIKit with OpenGL. OpenGL draws immediately but UIKit draws in cycles. + // Note: We have to wait for the transaction to finish before we disable this (aka for the drawing cycle to finish) + // we can't just disable presentsWithTransaction after the first frame because it may even take a couple frames for + // a UIKit drawing cycle to complete (rarely but sometimes) + // Without this you will get weird content flashes when switching between videos of different size + // since the content will be drawn into a view that which although has the right frame/bounds it is not + // yet actually reflected on the screen. OpenGL would just draw right into the wrongly displayed view + // as soon as presentBufferForDisplay() is called. + // Source --> https://stackoverflow.com/a/30722276/1275014 + // Source --> https://developer.apple.com/documentation/quartzcore/caeagllayer/1618676-presentswithtransaction + self.presentWithTransaction() + var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer @@ -79,7 +106,6 @@ public class RenderView:UIView, ImageConsumer { displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - CATransaction.flush() sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) var backingWidth:GLint = 0 @@ -126,22 +152,49 @@ public class RenderView:UIView, ImageConsumer { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { - // Bail if we couldn't successfully create the displayFramebuffer - return + let processFramebuffer = { + // Don't bog down UIKIt with a bunch of framebuffers if we are waiting for a transaction to complete + // otherwise we will block the main thread as it trys to catch up. 
+ if (self.waitingForTransaction) { return } + + self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + + sharedImageProcessingContext.runOperationSynchronously { + if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { + // Bail if we couldn't successfully create the displayFramebuffer + return + } + self.activateDisplayFramebuffer() + + clearFramebufferWithColor(self.backgroundRenderColor) + + let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:self.backingSize) + renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) + + glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) + + sharedImageProcessingContext.presentBufferForDisplay() + } + + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + + sharedImageProcessingContext.runOperationSynchronously { + framebuffer.unlock() + } } - self.activateDisplayFramebuffer() - - clearFramebufferWithColor(backgroundRenderColor) - let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:backingSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) - - glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - sharedImageProcessingContext.presentBufferForDisplay() - - self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - framebuffer.unlock() + if(self.delegate?.shouldDisplayNextFramebufferOnMainThread() ?? 
false) { + // CAUTION: Never call sync from the sharedImageProcessingContext, it will cause cyclic thread deadlocks + // If you are curious, change this to sync, then try trimming/scrubbing a video + // Before that happens you will get a deadlock when someone calls runOperationSynchronously since the main thread is blocked + // There is a way to get around this but then the first thing mentioned will happen + DispatchQueue.main.async { + processFramebuffer() + } + } + else { + processFramebuffer() + } } } From 2bff2b5700e38301aaf7e7d0113bd1652ea7f409 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 8 Feb 2018 14:27:06 -0800 Subject: [PATCH 036/332] MovieInput last frame reprocessing support for when paused, and customizable startFrameTime support --- framework/Source/Framebuffer.swift | 2 +- framework/Source/ImageOrientation.swift | 2 +- framework/Source/OperationGroup.swift | 4 +-- framework/Source/iOS/MovieInput.swift | 31 +++++++++++++++++++---- framework/Source/iOS/RenderView.swift | 33 ++++++++++++++----------- 5 files changed, 49 insertions(+), 23 deletions(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index ea4b9a2f..228a32d4 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -128,7 +128,7 @@ public class Framebuffer { } } - func aspectRatioForRotation(_ rotation:Rotation) -> Float { + public func aspectRatioForRotation(_ rotation:Rotation) -> Float { if rotation.flipsDimensions() { return Float(size.width) / Float(size.height) } else { diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index afdc073c..7371c0d1 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -4,7 +4,7 @@ public enum ImageOrientation { case landscapeLeft case landscapeRight - func rotationNeededForOrientation(_ targetOrientation:ImageOrientation) -> Rotation { + public func rotationNeededForOrientation(_ 
targetOrientation:ImageOrientation) -> Rotation { switch (self, targetOrientation) { case (.portrait, .portrait), (.portraitUpsideDown, .portraitUpsideDown), (.landscapeLeft, .landscapeLeft), (.landscapeRight, .landscapeRight): return .noRotation case (.portrait, .portraitUpsideDown): return .rotate180 diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index 8e6f5675..634c7219 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -1,6 +1,6 @@ open class OperationGroup: ImageProcessingOperation { - let inputImageRelay = ImageRelay() - let outputImageRelay = ImageRelay() + public let inputImageRelay = ImageRelay() + public let outputImageRelay = ImageRelay() public var sources:SourceContainer { get { return inputImageRelay.sources } } public var targets:TargetContainer { get { return outputImageRelay.targets } } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index af98f82c..186a33e7 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -9,13 +9,20 @@ public class MovieInput: ImageSource { let videoComposition:AVVideoComposition? let playAtActualSpeed:Bool public var loop:Bool - var previousFrameTime:CMTime? + public var startFrameTime:CMTime? + public var currentFrameTime:CMTime? { + get { + return self.lastFrameTime + } + } var currentThread:Thread? - var startFrameTime:CMTime? + var lastFrameTime:CMTime? var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 + var movieFramebuffer:Framebuffer? 
+ // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false) throws { @@ -35,6 +42,7 @@ public class MovieInput: ImageSource { } deinit { + self.movieFramebuffer?.unlock() self.cancel() } @@ -63,7 +71,7 @@ public class MovieInput: ImageSource { public func pause() { self.cancel() - self.startFrameTime = self.previousFrameTime + self.startFrameTime = self.lastFrameTime } // MARK: - @@ -93,6 +101,7 @@ public class MovieInput: ImageSource { assetReader.timeRange = CMTimeRange(start: startFrameTime, duration: kCMTimePositiveInfinity) } self.startFrameTime = nil + self.lastFrameTime = nil return assetReader } catch { @@ -161,10 +170,10 @@ public class MovieInput: ImageSource { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) // Retrieve the rolling frame rate (duration between each frame) - let frameDuration = CMTimeSubtract(currentSampleTime, self.previousFrameTime ?? kCMTimeZero) + let frameDuration = CMTimeSubtract(currentSampleTime, self.lastFrameTime ?? 
CMTimeAdd(currentSampleTime, CMTime(value: 1, timescale: 30))) frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 - self.previousFrameTime = currentSampleTime + self.lastFrameTime = currentSampleTime } sharedImageProcessingContext.runOperationSynchronously{ @@ -276,13 +285,17 @@ public class MovieInput: ImageSource { chrominanceFramebuffer.lock() + self.movieFramebuffer?.unlock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + movieFramebuffer.lock() movieFramebuffer.sampleTime = withSampleTime convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) + self.movieFramebuffer = movieFramebuffer + self.updateTargetsWithFramebuffer(movieFramebuffer) if self.runBenchmark { @@ -298,6 +311,14 @@ public class MovieInput: ImageSource { // Not needed for movie inputs } + public func reprocessLastFrame() { + sharedImageProcessingContext.runOperationAsynchronously { + if let movieFramebuffer = self.movieFramebuffer { + self.updateTargetsWithFramebuffer(movieFramebuffer) + } + } + } + // MARK: - // MARK: Thread configuration diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 5396ef6b..51f451b9 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -83,19 +83,7 @@ public class RenderView:UIView, ImageConsumer { } func createDisplayFramebuffer() -> Bool { - // Prevent the first frame from prematurely drawing before the view is drawn to the screen at the right size - // Aka we want to 
briefly synchronize UIKit with OpenGL. OpenGL draws immediately but UIKit draws in cycles. - // Note: We have to wait for the transaction to finish before we disable this (aka for the drawing cycle to finish) - // we can't just disable presentsWithTransaction after the first frame because it may even take a couple frames for - // a UIKit drawing cycle to complete (rarely but sometimes) - // Without this you will get weird content flashes when switching between videos of different size - // since the content will be drawn into a view that which although has the right frame/bounds it is not - // yet actually reflected on the screen. OpenGL would just draw right into the wrongly displayed view - // as soon as presentBufferForDisplay() is called. - // Source --> https://stackoverflow.com/a/30722276/1275014 - // Source --> https://developer.apple.com/documentation/quartzcore/caeagllayer/1618676-presentswithtransaction - self.presentWithTransaction() - + sharedImageProcessingContext.makeCurrentContext() var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer @@ -114,8 +102,12 @@ public class RenderView:UIView, ImageConsumer { glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_HEIGHT), &backingHeight) backingSize = GLSize(width:backingWidth, height:backingHeight) - guard ((backingWidth > 0) && (backingHeight > 0)) else { + guard (backingWidth > 0 && backingHeight > 0) else { print("Warning: View had a zero size") + + if(self.internalLayer.bounds.width > 0 && self.internalLayer.bounds.height > 0) { + print("Warning: View size \(self.internalLayer.bounds) may be too large ") + } return false } @@ -127,6 +119,19 @@ public class RenderView:UIView, ImageConsumer { return false } + // Prevent the first frame from prematurely drawing before the view is drawn to the screen at the right size + // Aka we want to briefly synchronize UIKit with OpenGL. 
OpenGL draws immediately but UIKit draws in cycles. + // Note: We have to wait for the transaction to finish (aka for the drawing cycle to finish) before we disable this + // we can't just disable presentsWithTransaction after the first frame because it may even take a couple frames for + // a UIKit drawing cycle to complete (rarely but sometimes) + // Without this you will get weird content flashes when switching between videos of different size + // since the content will be drawn into a view that which although has the right frame/bounds it is not + // yet actually reflected on the screen. OpenGL would just draw right into the wrongly displayed view + // as soon as presentBufferForDisplay() is called. + // Source --> https://stackoverflow.com/a/30722276/1275014 + // Source --> https://developer.apple.com/documentation/quartzcore/caeagllayer/1618676-presentswithtransaction + self.presentWithTransaction() + return true } From 343d9fc043b0909041afd1e9b40935c90f569157 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 20 Feb 2018 21:20:55 -0800 Subject: [PATCH 037/332] Safer PictureInput, disable framebuffer forwarding --- framework/Source/iOS/PictureInput.swift | 35 +++++++++++++++---------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index eea6d4d1..fac0f0be 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -3,7 +3,7 @@ import UIKit public class PictureInput: ImageSource { public let targets = TargetContainer() - var imageFramebuffer:Framebuffer! + var imageFramebuffer:Framebuffer? 
var hasProcessedImage:Bool = false public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { @@ -86,19 +86,21 @@ public class PictureInput: ImageSource { imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly - dataFromImageDataProvider = image.dataProvider?.data + guard let data = image.dataProvider?.data else { return } + dataFromImageDataProvider = data imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) } do { // TODO: Alter orientation based on metadata from photo self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) - self.imageFramebuffer.lock() + self.imageFramebuffer!.lock() } catch { - fatalError("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") + print("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") + return } - glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer.texture) + glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer!.texture) if (smoothlyScaleOutput) { glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR) } @@ -109,8 +111,6 @@ public class PictureInput: ImageSource { glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) - - } if (shouldRedrawUsingCoreGraphics) { @@ -131,27 +131,34 @@ public class PictureInput: ImageSource { deinit { //debugPrint("Deallocating operation: \(self)") - imageFramebuffer.unlock() + self.imageFramebuffer?.unlock() } public func processImage(synchronously:Bool = false) { if synchronously { sharedImageProcessingContext.runOperationSynchronously{ - 
self.updateTargetsWithFramebuffer(self.imageFramebuffer) - self.hasProcessedImage = true + if let framebuffer = self.imageFramebuffer { + self.updateTargetsWithFramebuffer(framebuffer) + self.hasProcessedImage = true + } } } else { sharedImageProcessingContext.runOperationAsynchronously{ - self.updateTargetsWithFramebuffer(self.imageFramebuffer) - self.hasProcessedImage = true + if let framebuffer = self.imageFramebuffer { + self.updateTargetsWithFramebuffer(framebuffer) + self.hasProcessedImage = true + } } } } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - if hasProcessedImage { + // This gets called after the pipline gets adjusted and needs an image it + // Disabled so we can adjust/prepare the pipline freely without worrying an old framebuffer will get pushed through it + // If after changing the pipline you need the prior frame buffer to be reprocessed, call processImage() again. + /*if hasProcessedImage { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) - } + }*/ } } From 2d510927ba1e0372c4f77373001999f47bca9276 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 22 Feb 2018 18:58:59 -0800 Subject: [PATCH 038/332] MovieInput audio support, code cleanup, optional Realtime threads --- framework/Source/iOS/Camera.swift | 3 +- framework/Source/iOS/MovieInput.swift | 139 +++++++++++++++---------- framework/Source/iOS/MovieOutput.swift | 10 ++ 3 files changed, 95 insertions(+), 57 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 6f7c944a..467d5a1c 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -50,10 +50,11 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var logFPS:Bool = false public var audioEncodingTarget:AudioEncodingTarget? 
{ didSet { - guard let audioEncodingTarget = audioEncodingTarget else { + guard var audioEncodingTarget = audioEncodingTarget else { //self.removeAudioInputsAndOutputs() return } + audioEncodingTarget.shouldInvalidateAudioSampleWhenDone = false do { try self.addAudioInputsAndOutputs() audioEncodingTarget.activateAudioTrack() diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 186a33e7..5669d152 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -4,19 +4,32 @@ public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false + public var audioEncodingTarget:AudioEncodingTarget? { + didSet { + guard var audioEncodingTarget = audioEncodingTarget else { + return + } + audioEncodingTarget.shouldInvalidateAudioSampleWhenDone = true + audioEncodingTarget.activateAudioTrack() + } + } + let yuvConversionShader:ShaderProgram let asset:AVAsset let videoComposition:AVVideoComposition? let playAtActualSpeed:Bool public var loop:Bool - public var startFrameTime:CMTime? + var startFrameTime:CMTime? public var currentFrameTime:CMTime? { get { return self.lastFrameTime } } - var currentThread:Thread? var lastFrameTime:CMTime? + + public var useRealtimeThreads = false + var timebaseInfo = mach_timebase_info_data_t() + var currentThread:Thread? 
var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 @@ -31,8 +44,6 @@ public class MovieInput: ImageSource { self.playAtActualSpeed = playAtActualSpeed self.loop = loop self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - - // TODO: Audio here } public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { @@ -49,7 +60,12 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control // Only call these methods from the main thread - + + public func start(atTime: CMTime? = nil) { + self.startFrameTime = atTime + self.start() + } + @objc public func start() { if let currentThread = self.currentThread, currentThread.isExecuting, @@ -57,7 +73,7 @@ public class MovieInput: ImageSource { // If the current thread is running and has not been cancelled, bail. return } - // Just to be safe. 
+ // Cancel the thread just to be safe incase we somehow get here with the thread still running self.currentThread?.cancel() self.currentThread = Thread(target: self, selector: #selector(beginReading), object: nil) @@ -97,6 +113,13 @@ public class MovieInput: ImageSource { assetReader.add(readerVideoTrackOutput) } + if let audioTrack = self.asset.tracks(withMediaType: AVMediaTypeAudio).first, + let _ = self.audioEncodingTarget { + let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) + readerAudioTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerAudioTrackOutput) + } + if let startFrameTime = self.startFrameTime { assetReader.timeRange = CMTimeRange(start: startFrameTime, duration: kCMTimePositiveInfinity) } @@ -105,7 +128,7 @@ public class MovieInput: ImageSource { return assetReader } catch { - print("Could not create asset reader: \(error)") + print("ERROR: Unable to create asset reader: \(error)") } return nil } @@ -113,7 +136,13 @@ public class MovieInput: ImageSource { @objc func beginReading() { let thread = Thread.current - self.configureThread() + mach_timebase_info(&timebaseInfo) + if(useRealtimeThreads) { + self.configureThread() + } + else { + thread.qualityOfService = .userInteractive + } guard let assetReader = self.createReader() else { return // A return statement will end thread execution @@ -122,32 +151,37 @@ public class MovieInput: ImageSource { do { try ObjC.catchException { guard assetReader.startReading() else { - print("Couldn't start reading: \(assetReader.error)") + print("ERROR: Unable to start reading: \(assetReader.error)") return } } } catch { - print("Couldn't start reading: \(error)") + print("ERROR: Unable to start reading: \(error)") return } - var readerVideoTrackOutput:AVAssetReaderOutput? = nil; + var readerVideoTrackOutput:AVAssetReaderOutput? = nil + var readerAudioTrackOutput:AVAssetReaderOutput? 
= nil for output in assetReader.outputs { if(output.mediaType == AVMediaTypeVideo) { readerVideoTrackOutput = output } + if(output.mediaType == AVMediaTypeAudio) { + readerAudioTrackOutput = output + } } while(assetReader.status == .reading) { if(thread.isCancelled) { break } self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } } assetReader.cancelReading() - // Since only the main thread will cancel threads + // Since only the main thread will cancel and create threads // jump onto the main thead to prevent the current thread from being cancelled // in between the below if statement check and creating the new thread DispatchQueue.main.async { @@ -160,49 +194,51 @@ public class MovieInput: ImageSource { } func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { - if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { + guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { return } - let renderStart = DispatchTime.now() - var frameDurationNanos: Float64 = 0 + let renderStart = DispatchTime.now() + var frameDurationNanos: Float64 = 0 + + if (self.playAtActualSpeed) { + // Sample time eg. first frame is 0,30 second frame is 1,30 + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + // Retrieve the rolling frame rate (duration between each frame) + let frameDuration = CMTimeSubtract(currentSampleTime, self.lastFrameTime ?? CMTimeAdd(currentSampleTime, CMTime(value: 1, timescale: 30))) + frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 - if (self.playAtActualSpeed) { - // Sample time eg. 
first frame is 0,30 second frame is 1,30 - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - - // Retrieve the rolling frame rate (duration between each frame) - let frameDuration = CMTimeSubtract(currentSampleTime, self.lastFrameTime ?? CMTimeAdd(currentSampleTime, CMTime(value: 1, timescale: 30))) - frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 - - self.lastFrameTime = currentSampleTime - } + self.lastFrameTime = currentSampleTime + } + + sharedImageProcessingContext.runOperationSynchronously{ + self.process(movieFrame:sampleBuffer) + CMSampleBufferInvalidate(sampleBuffer) + } + + if(self.playAtActualSpeed) { + let renderEnd = DispatchTime.now() + // Find the amount of time it took to display the last frame + let renderDurationNanos = Double(renderEnd.uptimeNanoseconds - renderStart.uptimeNanoseconds) + // Find how much time we should wait to display the next frame. So it would be the frame duration minus the + // amount of time we already spent rendering the current frame. + let waitDurationNanos = Int(frameDurationNanos - renderDurationNanos) - sharedImageProcessingContext.runOperationSynchronously{ - self.process(movieFrame:sampleBuffer) - CMSampleBufferInvalidate(sampleBuffer) - } + // When the wait duration begins returning negative values consistently + // It means the OS is unable to provide enough processing time for the above work + // and that you need to adjust the real time thread policy below + //print("Render duration: \(String(format: "%.4f",renderDurationNanos / 1_000_000)) ms Wait duration: \(String(format: "%.4f",Double(waitDurationNanos) / 1_000_000)) ms") - if(self.playAtActualSpeed) { - let renderEnd = DispatchTime.now() - - // Find the amount of time it took to display the last frame - let renderDurationNanos = Double(renderEnd.uptimeNanoseconds - renderStart.uptimeNanoseconds) - - // Find how much time we should wait to display the next frame. 
So it would be the frame duration minus the - // amount of time we already spent rendering the current frame. - let waitDurationNanos = Int(frameDurationNanos - renderDurationNanos) - - // When the wait duration begins returning negative values consistently - // It means the OS is unable to provide enough processing time for the above work - // and that you need to adjust the real time thread policy below - //print("Render duration: \(String(format: "%.4f",renderDurationNanos / 1_000_000)) ms Wait duration: \(String(format: "%.4f",Double(waitDurationNanos) / 1_000_000)) ms") - - if(waitDurationNanos > 0) { - mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(waitDurationNanos))) - } + if(waitDurationNanos > 0) { + mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(waitDurationNanos))) } } } + func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { + guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { return } + + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) + } + func process(movieFrame frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! @@ -210,8 +246,6 @@ public class MovieInput: ImageSource { self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } - - //Code from pull request https://github.com/BradLarson/GPUImage2/pull/183 func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) @@ -256,8 +290,6 @@ public class MovieInput: ImageSource { return } - luminanceFramebuffer.lock() - var chrominanceGLTexture: CVOpenGLESTexture? 
glActiveTexture(GLenum(GL_TEXTURE1)) @@ -283,8 +315,6 @@ public class MovieInput: ImageSource { return } - chrominanceFramebuffer.lock() - self.movieFramebuffer?.unlock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) movieFramebuffer.lock() @@ -322,10 +352,7 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Thread configuration - var timebaseInfo = mach_timebase_info_data_t() - func configureThread() { - mach_timebase_info(&timebaseInfo) let clock2abs = Double(timebaseInfo.denom) / Double(timebaseInfo.numer) * Double(NSEC_PER_MSEC) // http://docs.huihoo.com/darwin/kernel-programming-guide/scheduler/chapter_8_section_4.html diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 1ffcefc4..fc5b1d62 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -3,6 +3,8 @@ import AVFoundation extension String: Error {} public protocol AudioEncodingTarget { + var shouldInvalidateAudioSampleWhenDone: Bool { get set } + func activateAudioTrack() func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) } @@ -11,6 +13,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let sources = SourceContainer() public let maximumInputs:UInt = 1 + public var shouldInvalidateAudioSampleWhenDone: Bool = false + let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? 
@@ -273,6 +277,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard let assetWriterAudioInput = assetWriterAudioInput else { return } movieProcessingContext.runOperationAsynchronously{ + defer { + if(self.shouldInvalidateAudioSampleWhenDone) { + CMSampleBufferInvalidate(sampleBuffer) + } + } + guard self.isRecording else { return } guard self.assetWriter.status == .writing else { return } guard !self.audioEncodingIsFinished else { return } From 42e8c3e5102578c2dcb48f16517f4718f9e81b17 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sat, 24 Feb 2018 16:53:40 -0800 Subject: [PATCH 039/332] MovieInput completion block and delegate call Remove unneeded sampleTime var on framebuffer since there is a timingstyle var Delegate on camera should not be retained --- framework/Source/BasicOperation.swift | 3 -- framework/Source/Framebuffer.swift | 4 +-- framework/Source/FramebufferCache.swift | 1 - .../Operations/TransformOperation.swift | 1 + framework/Source/iOS/Camera.swift | 4 +-- framework/Source/iOS/MovieInput.swift | 32 ++++++++++++------- framework/Source/iOS/MovieOutput.swift | 2 ++ 7 files changed, 27 insertions(+), 20 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index ecd3931c..46dfc13f 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -99,9 +99,6 @@ open class BasicOperation: ImageProcessingOperation { if (UInt(inputFramebuffers.count) >= maximumInputs) { renderFrame() - // Carry this over if we have it - outputFramebuffer.sampleTime = framebuffer.sampleTime - updateTargetsWithFramebuffer(outputFramebuffer) } } diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 228a32d4..f0e720e9 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -33,7 +33,7 @@ public enum FramebufferTimingStyle { } } - var timestamp:Timestamp? { + public var timestamp:Timestamp? 
{ get { switch self { case .stillImage: return nil @@ -60,8 +60,6 @@ public class Framebuffer { unowned var context:OpenGLContext - public var sampleTime: CMTime? - public init(context:OpenGLContext, orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false, overriddenTexture:GLuint? = nil) throws { self.context = context self.size = size diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 62eb50ae..fcc83d13 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -34,7 +34,6 @@ public class FramebufferCache { //print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeLast() framebuffer.orientation = orientation - framebuffer.sampleTime = nil } else { do { //debugPrint("Generating new framebuffer at size: \(size)") diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index e89a7300..1dcc73f8 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -42,6 +42,7 @@ open class TransformOperation: BasicOperation { } func normalizedImageVerticesForAspectRatio(_ aspectRatio:Float) -> [GLfloat] { + // [TopLeft.x, TopLeft.y, TopRight.x, TopRight.y, BottomLeft.x, BottomLeft.y, BottomRight.x, BottomRight.y] if(anchorTopLeft) { return [0.0, 0.0, 1.0, 0.0, 0.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] } diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 467d5a1c..c6cfe3fe 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -1,7 +1,7 @@ import Foundation import AVFoundation -public protocol CameraDelegate 
{ +public protocol CameraDelegate: class { func didCaptureBuffer(_ sampleBuffer: CMSampleBuffer) } public enum PhysicalCameraLocation { @@ -65,7 +65,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public let targets = TargetContainer() - public var delegate: CameraDelegate? + public weak var delegate: CameraDelegate? public let captureSession:AVCaptureSession public let inputCamera:AVCaptureDevice! let videoInput:AVCaptureDeviceInput! diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 5669d152..5c31f566 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -1,9 +1,15 @@ import AVFoundation +public protocol MovieInputDelegate: class { + func didFinishMovie() +} + public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false + public weak var delegate: MovieInputDelegate? + public var audioEncodingTarget:AudioEncodingTarget? { didSet { guard var audioEncodingTarget = audioEncodingTarget else { @@ -18,14 +24,16 @@ public class MovieInput: ImageSource { let asset:AVAsset let videoComposition:AVVideoComposition? let playAtActualSpeed:Bool - public var loop:Bool var startFrameTime:CMTime? + var lastFrameTime:CMTime? + + public var loop:Bool public var currentFrameTime:CMTime? { get { return self.lastFrameTime } } - var lastFrameTime:CMTime? + public var completion: (() -> Void)? public var useRealtimeThreads = false var timebaseInfo = mach_timebase_info_data_t() @@ -59,9 +67,8 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control - // Only call these methods from the main thread - public func start(atTime: CMTime? = nil) { + public func start(atTime: CMTime) { self.startFrameTime = atTime self.start() } @@ -73,7 +80,7 @@ public class MovieInput: ImageSource { // If the current thread is running and has not been cancelled, bail. 
return } - // Cancel the thread just to be safe incase we somehow get here with the thread still running + // Cancel the thread just to be safe in the event we somehow get here with the thread still running self.currentThread?.cancel() self.currentThread = Thread(target: self, selector: #selector(beginReading), object: nil) @@ -102,7 +109,7 @@ public class MovieInput: ImageSource { let assetReader = try AVAssetReader.init(asset: self.asset) if(self.videoComposition == nil) { - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo).first!, outputSettings:outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerVideoTrackOutput) } @@ -137,6 +144,7 @@ public class MovieInput: ImageSource { let thread = Thread.current mach_timebase_info(&timebaseInfo) + if(useRealtimeThreads) { self.configureThread() } @@ -181,15 +189,18 @@ public class MovieInput: ImageSource { assetReader.cancelReading() - // Since only the main thread will cancel and create threads - // jump onto the main thead to prevent the current thread from being cancelled - // in between the below if statement check and creating the new thread + // Since only the main thread will cancel and create threads jump onto it to prevent + // the current thread from being cancelled in between the below if statement and creating the new thread DispatchQueue.main.async { // Start the video over so long as it wasn't cancelled if (self.loop && !thread.isCancelled) { self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) self.currentThread?.start() } + else { + self.delegate?.didFinishMovie() + self.completion?() + } } } @@ -200,7 +211,7 @@ public class MovieInput: ImageSource { var frameDurationNanos: Float64 = 0 if (self.playAtActualSpeed) { - // 
Sample time eg. first frame is 0,30 second frame is 1,30 + // Sample time e.g. first frame is 0,30 second frame is 1,30 let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) // Retrieve the rolling frame rate (duration between each frame) let frameDuration = CMTimeSubtract(currentSampleTime, self.lastFrameTime ?? CMTimeAdd(currentSampleTime, CMTime(value: 1, timescale: 30))) @@ -318,7 +329,6 @@ public class MovieInput: ImageSource { self.movieFramebuffer?.unlock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) movieFramebuffer.lock() - movieFramebuffer.sampleTime = withSampleTime convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index fc5b1d62..a630d9e9 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -123,6 +123,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { completionCallback?(true) } catch { + print("Unable to start recording: \(error)") + self.assetWriter.cancelWriting() self.isRecording = false From 4ca1c2bf35e3bc1cfb2fcdc0059c09ba8dc81ed5 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 25 Feb 2018 13:40:56 -0800 Subject: [PATCH 040/332] Full syncronized encoding support --- framework/Source/iOS/MovieInput.swift | 127 +++++++++++++++++++++++-- framework/Source/iOS/MovieOutput.swift | 59 +++++++++--- 2 files changed, 164 insertions(+), 22 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 
5c31f566..c764f867 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -4,6 +4,8 @@ public protocol MovieInputDelegate: class { func didFinishMovie() } +let synchronizedEncodingDebug = false + public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false @@ -17,13 +19,16 @@ public class MovieInput: ImageSource { } audioEncodingTarget.shouldInvalidateAudioSampleWhenDone = true audioEncodingTarget.activateAudioTrack() + + // Call enableSyncronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput + if(synchronizedMovieOutput != nil) { self.enableSyncronizedEncoding() } } } let yuvConversionShader:ShaderProgram let asset:AVAsset let videoComposition:AVVideoComposition? - let playAtActualSpeed:Bool + var playAtActualSpeed:Bool var startFrameTime:CMTime? var lastFrameTime:CMTime? @@ -35,16 +40,25 @@ public class MovieInput: ImageSource { } public var completion: (() -> Void)? + public var synchronizedMovieOutput:MovieOutput? { + didSet { + self.enableSyncronizedEncoding() + } + } + let conditionLock = NSCondition() + var readingShouldWait = false + var videoInputStatusObserver:NSKeyValueObservation? + var audioInputStatusObserver:NSKeyValueObservation? + public var useRealtimeThreads = false var timebaseInfo = mach_timebase_info_data_t() var currentThread:Thread? - + var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 var movieFramebuffer:Framebuffer? 
- // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false) throws { self.asset = asset @@ -63,6 +77,9 @@ public class MovieInput: ImageSource { deinit { self.movieFramebuffer?.unlock() self.cancel() + + self.videoInputStatusObserver?.invalidate() + self.audioInputStatusObserver?.invalidate() } // MARK: - @@ -148,12 +165,15 @@ public class MovieInput: ImageSource { if(useRealtimeThreads) { self.configureThread() } - else { + else if(playAtActualSpeed) { thread.qualityOfService = .userInteractive } + else { + thread.qualityOfService = .default // Synchronized encoding + } guard let assetReader = self.createReader() else { - return // A return statement will end thread execution + return // A return statement in this frame will end thread execution } do { @@ -183,8 +203,28 @@ public class MovieInput: ImageSource { while(assetReader.status == .reading) { if(thread.isCancelled) { break } - self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) - if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } + + if let movieOutput = self.synchronizedMovieOutput { + self.conditionLock.lock() + if(self.readingShouldWait) { + if(synchronizedEncodingDebug) { print("Disable reading") } + self.conditionLock.wait() + if(synchronizedEncodingDebug) { print("Enable reading") } + } + self.conditionLock.unlock() + + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData) { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + } + if(movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? 
false) { + if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } + } + } + else { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } + } + } assetReader.cancelReading() @@ -200,13 +240,26 @@ public class MovieInput: ImageSource { else { self.delegate?.didFinishMovie() self.completion?() + + if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Synchronized encoding finished") } } } } func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { - guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { return } + guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { + if let movieOutput = self.synchronizedMovieOutput { + movieOutput.movieProcessingContext.runOperationAsynchronously { + movieOutput.videoEncodingIsFinished = true + movieOutput.assetWriterVideoInput.markAsFinished() + } + } + return + } + + if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process frame input") } + let renderStart = DispatchTime.now() var frameDurationNanos: Float64 = 0 @@ -245,7 +298,18 @@ public class MovieInput: ImageSource { } func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { - guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { return } + guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { + if let movieOutput = self.synchronizedMovieOutput { + movieOutput.movieProcessingContext.runOperationAsynchronously { + movieOutput.audioEncodingIsFinished = true + movieOutput.assetWriterAudioInput?.markAsFinished() + } + } + + return + } + + if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process audio sample 
input") } self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) } @@ -359,6 +423,51 @@ public class MovieInput: ImageSource { } } + // MARK: - + // MARK: Syncronized encoding + + func enableSyncronizedEncoding() { + self.synchronizedMovieOutput?.encodingLiveVideo = false + self.playAtActualSpeed = false + self.loop = false + + // Subscribe to isReadyForMoreMediaData changes + self.setupObservers() + // Set the intial state of the lock + self.updateLock() + } + + func setupObservers() { + self.videoInputStatusObserver?.invalidate() + self.audioInputStatusObserver?.invalidate() + + guard let movieOutput = self.synchronizedMovieOutput else { return } + + self.videoInputStatusObserver = movieOutput.assetWriterVideoInput.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] (assetWriterVideoInput, change) in + guard let weakSelf = self else { return } + weakSelf.updateLock() + } + self.audioInputStatusObserver = movieOutput.assetWriterAudioInput?.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] (assetWriterAudioInput, change) in + guard let weakSelf = self else { return } + weakSelf.updateLock() + } + } + + func updateLock() { + guard let movieOutput = self.synchronizedMovieOutput else { return } + + self.conditionLock.lock() + // Allow reading if either input is able to accept data, prevent reading if both inputs are unable to accept data. + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? 
false) { + self.readingShouldWait = false + self.conditionLock.signal() + } + else { + self.readingShouldWait = true + } + self.conditionLock.unlock() + } + // MARK: - // MARK: Thread configuration diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index a630d9e9..e91744f7 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -25,13 +25,18 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private var isRecording = false private var isFinishing = false private var finishRecordingCompletionCallback:(() -> Void)? = nil - private var videoEncodingIsFinished = false - private var audioEncodingIsFinished = false + var videoEncodingIsFinished = false + var audioEncodingIsFinished = false private var startTime:CMTime? private var firstFrameTime: CMTime? private var previousFrameTime: CMTime? private var previousAudioTime: CMTime? - private var encodingLiveVideo:Bool + var encodingLiveVideo:Bool { + didSet { + assetWriterVideoInput.expectsMediaDataInRealTime = encodingLiveVideo + assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo + } + } var pixelBuffer:CVPixelBuffer? = nil var renderFramebuffer:Framebuffer! @@ -192,13 +197,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { glFinish(); movieProcessingContext.runOperationAsynchronously { - guard self.renderFramebuffer != nil else { return } - guard self.isRecording else { return } - guard self.assetWriter.status == .writing else { return } - guard !self.videoEncodingIsFinished else { return } + guard self.renderFramebuffer != nil, + self.isRecording, + self.assetWriter.status == .writing, + !self.videoEncodingIsFinished else { return } // Ignore still images and other non-video updates (do I still need this?) 
- guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + return + + } // Check if we are finishing and if this frame is later than the last recorded audio buffer // Note: isFinishing is only set when there is an audio buffer, otherwise the video is finished immediately @@ -211,7 +219,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != self.previousFrameTime) else { return } + guard (frameTime != self.previousFrameTime) else { + return + } if (self.startTime == nil) { self.assetWriter.startSession(atSourceTime: frameTime) @@ -226,6 +236,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } + while(!self.assetWriterVideoInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.videoEncodingIsFinished) { + if(synchronizedEncodingDebug) { print("Video waiting...") } + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. 
+ usleep(100000) // 0.1 seconds + } + if !self.movieProcessingContext.supportsTextureCaches() { let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return } @@ -233,6 +250,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + if(synchronizedEncodingDebug) { print("Process frame output") } + if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { debugPrint("Problem appending pixel buffer at time: \(frameTime)") } @@ -278,16 +297,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { guard let assetWriterAudioInput = assetWriterAudioInput else { return } - movieProcessingContext.runOperationAsynchronously{ + let work = { defer { if(self.shouldInvalidateAudioSampleWhenDone) { CMSampleBufferInvalidate(sampleBuffer) } } - guard self.isRecording else { return } - guard self.assetWriter.status == .writing else { return } - guard !self.audioEncodingIsFinished else { return } + guard self.isRecording, + self.assetWriter.status == .writing, + !self.audioEncodingIsFinished else { return } let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) @@ -311,10 +330,24 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } + while(!assetWriterAudioInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.audioEncodingIsFinished) { + if(synchronizedEncodingDebug) { print("Audio waiting...") } + usleep(100000) + } + + if(synchronizedEncodingDebug) { print("Process audio sample output") } + if (!assetWriterAudioInput.append(sampleBuffer)) { print("Trouble appending audio sample buffer: \(self.assetWriter.error)") } } + + if(self.encodingLiveVideo) { + 
movieProcessingContext.runOperationAsynchronously(work) + } + else { + work() + } } } From 57ef20fc2a37e43dadcb239bb139163f08e9ab47 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 25 Feb 2018 14:13:23 -0800 Subject: [PATCH 041/332] Code cleanup --- framework/GPUImage.xcodeproj/project.pbxproj | 24 +++++++++---------- framework/Source/GPUImage-Bridging-Header.h | 2 +- .../Source/{ObjC.h => NSObject+Exception.h} | 4 ++-- .../Source/{ObjC.m => NSObject+Exception.m} | 7 +++--- framework/Source/iOS/Camera.swift | 2 +- framework/Source/iOS/MovieInput.swift | 5 ++-- framework/Source/iOS/MovieOutput.swift | 6 ++--- 7 files changed, 25 insertions(+), 25 deletions(-) rename framework/Source/{ObjC.h => NSObject+Exception.h} (77%) rename framework/Source/{ObjC.m => NSObject+Exception.m} (72%) diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 9f034e28..0b0c9869 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -7,10 +7,10 @@ objects = { /* Begin PBXBuildFile section */ - 1F499A731FDA0F9F0000E37E /* ObjC.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* ObjC.m */; }; - 1F499A741FDA0F9F0000E37E /* ObjC.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* ObjC.m */; }; - 1F499A751FDA0F9F0000E37E /* ObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* ObjC.h */; }; - 1F499A761FDA0F9F0000E37E /* ObjC.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* ObjC.h */; }; + 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; + 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; + 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = 
PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; + 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -374,8 +374,8 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ - 1F499A711FDA0F9E0000E37E /* ObjC.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = ObjC.m; path = Source/ObjC.m; sourceTree = ""; }; - 1F499A721FDA0F9F0000E37E /* ObjC.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ObjC.h; path = Source/ObjC.h; sourceTree = ""; }; + 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSObject+Exception.m"; path = "Source/NSObject+Exception.m"; sourceTree = ""; }; + 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSObject+Exception.h"; path = "Source/NSObject+Exception.h"; sourceTree = ""; }; 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImage-Bridging-Header.h"; path = "Source/GPUImage-Bridging-Header.h"; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; 
path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; @@ -968,8 +968,8 @@ BC6E7CAD1C39A9D8006DF678 /* Other */ = { isa = PBXGroup; children = ( - 1F499A721FDA0F9F0000E37E /* ObjC.h */, - 1F499A711FDA0F9E0000E37E /* ObjC.m */, + 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */, + 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */, BC4C85ED1C9F042900FD95D8 /* ConvertedShaders_GL.swift */, BC9E35531E52521F00B8604F /* ConvertedShaders_GLES.swift */, 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */, @@ -1208,7 +1208,7 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( - 1F499A751FDA0F9F0000E37E /* ObjC.h in Headers */, + 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1216,7 +1216,7 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( - 1F499A761FDA0F9F0000E37E /* ObjC.h in Headers */, + 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1467,7 +1467,7 @@ BC7FD11C1CB0795A00037949 /* NormalBlend.swift in Sources */, BC4EE15E1CB3481F00AD8A65 /* ThresholdSobelEdgeDetection.swift in Sources */, BC7FD1911CB1D2A300037949 /* ImageGenerator.swift in Sources */, - 1F499A731FDA0F9F0000E37E /* ObjC.m in Sources */, + 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC7FD1201CB079B200037949 /* SaturationBlend.swift in Sources */, BCA4E2491CC3EF26007B51BA /* ColourFASTFeatureDetection.swift in Sources */, BC7FD0FD1CB06E0000037949 /* Position.swift in Sources */, @@ -1651,7 +1651,7 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* 
HighlightsAndShadows.swift in Sources */, - 1F499A741FDA0F9F0000E37E /* ObjC.m in Sources */, + 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, BC9E35831E52571100B8604F /* LocalBinaryPattern.swift in Sources */, diff --git a/framework/Source/GPUImage-Bridging-Header.h b/framework/Source/GPUImage-Bridging-Header.h index 4ca4ed7e..3ba6a8ce 100644 --- a/framework/Source/GPUImage-Bridging-Header.h +++ b/framework/Source/GPUImage-Bridging-Header.h @@ -9,6 +9,6 @@ #ifndef GPUImage_Bridging_Header_h #define GPUImage_Bridging_Header_h -#import "ObjC.h" +#import "NSObject+Exception.h" #endif /* GPUImage_Bridging_Header_h */ diff --git a/framework/Source/ObjC.h b/framework/Source/NSObject+Exception.h similarity index 77% rename from framework/Source/ObjC.h rename to framework/Source/NSObject+Exception.h index 86dbae1d..bb0bf010 100644 --- a/framework/Source/ObjC.h +++ b/framework/Source/NSObject+Exception.h @@ -1,5 +1,5 @@ // -// ObjC.h +// NSObject+Exception.h // GPUImage2 // // Created by Josh Bernfeld on 11/23/17. @@ -7,7 +7,7 @@ #import -@interface ObjC : NSObject +@interface NSObject (Exception) + (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error; diff --git a/framework/Source/ObjC.m b/framework/Source/NSObject+Exception.m similarity index 72% rename from framework/Source/ObjC.m rename to framework/Source/NSObject+Exception.m index ad687703..ed6d3711 100644 --- a/framework/Source/ObjC.m +++ b/framework/Source/NSObject+Exception.m @@ -1,13 +1,14 @@ // -// ObjC.m +// NSObject+Exception.m // GPUImage2 // // Created by Josh Bernfeld on 11/23/17. 
// +// Source: https://stackoverflow.com/a/36454808/1275014 -#import "ObjC.h" +#import "NSObject+Exception.h" -@implementation ObjC +@implementation NSObject (Exception) + (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error { @try { diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index c6cfe3fe..356f4e81 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -184,7 +184,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } func captureSessionRuntimeError(note: NSNotification) { - print("Capture Session Runtime Error: \(note.userInfo)") + print("ERROR: Capture session runtime error: \(String(describing: note.userInfo))") if(self.captureSessionRestartAttempts < 1) { DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { self.startCapture() diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index c764f867..4265d18d 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -177,9 +177,9 @@ public class MovieInput: ImageSource { } do { - try ObjC.catchException { + try NSObject.catchException { guard assetReader.startReading() else { - print("ERROR: Unable to start reading: \(assetReader.error)") + print("ERROR: Unable to start reading: \(String(describing: assetReader.error))") return } } @@ -224,7 +224,6 @@ public class MovieInput: ImageSource { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) 
if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } } - } assetReader.cancelReading() diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index e91744f7..ca5b26c8 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -90,12 +90,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { movieProcessingContext.runOperationAsynchronously { do { - try ObjC.catchException { + try NSObject.catchException { self.isRecording = self.assetWriter.startWriting() } if(!self.isRecording) { - throw "Could not start asset writer: \(self.assetWriter.error)" + throw "Could not start asset writer: \(String(describing: self.assetWriter.error))" } guard let pixelBufferPool = self.assetWriterPixelBufferInput.pixelBufferPool else { @@ -338,7 +338,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if(synchronizedEncodingDebug) { print("Process audio sample output") } if (!assetWriterAudioInput.append(sampleBuffer)) { - print("Trouble appending audio sample buffer: \(self.assetWriter.error)") + print("Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") } } From 59219c3bc9608cf5d351e21cd8ec4a681b406062 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Tue, 27 Feb 2018 13:24:10 -0800 Subject: [PATCH 042/332] Support for custom audio settings, renderView present with transaction is disabled by default --- framework/Source/iOS/Camera.swift | 6 +- framework/Source/iOS/MovieInput.swift | 15 ++-- framework/Source/iOS/MovieOutput.swift | 100 ++++++++++++++----------- framework/Source/iOS/RenderView.swift | 17 ++++- 4 files changed, 84 insertions(+), 54 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 356f4e81..019d7c64 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -51,10 
+51,10 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioEncodingTarget:AudioEncodingTarget? { didSet { guard var audioEncodingTarget = audioEncodingTarget else { + // Removing the audio inputs and outputs causes a black flash on the video output //self.removeAudioInputsAndOutputs() return } - audioEncodingTarget.shouldInvalidateAudioSampleWhenDone = false do { try self.addAudioInputsAndOutputs() audioEncodingTarget.activateAudioTrack() @@ -72,7 +72,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public let videoOutput:AVCaptureVideoDataOutput! var microphone:AVCaptureDevice? var audioInput:AVCaptureDeviceInput? - var audioOutput:AVCaptureAudioDataOutput? + public var audioOutput:AVCaptureAudioDataOutput? var supportsFullYUVRange:Bool = false let captureAsYUV:Bool @@ -350,6 +350,6 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) } } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 4265d18d..36aaea69 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -17,11 +17,10 @@ public class MovieInput: ImageSource { guard var audioEncodingTarget = audioEncodingTarget else { return } - audioEncodingTarget.shouldInvalidateAudioSampleWhenDone = true audioEncodingTarget.activateAudioTrack() - // Call enableSyncronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput - if(synchronizedMovieOutput != nil) { self.enableSyncronizedEncoding() } + // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput + if(synchronizedMovieOutput != 
nil) { self.enableSynchronizedEncoding() } } } @@ -42,7 +41,7 @@ public class MovieInput: ImageSource { public var synchronizedMovieOutput:MovieOutput? { didSet { - self.enableSyncronizedEncoding() + self.enableSynchronizedEncoding() } } let conditionLock = NSCondition() @@ -249,6 +248,8 @@ public class MovieInput: ImageSource { guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { movieOutput.movieProcessingContext.runOperationAsynchronously { + // Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done appending buffers to it. + // This is necessary to prevent other inputs from stalling, as they may otherwise wait forever for that input's media data, attempting to complete the ideal interleaving pattern. movieOutput.videoEncodingIsFinished = true movieOutput.assetWriterVideoInput.markAsFinished() } @@ -310,7 +311,7 @@ public class MovieInput: ImageSource { if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process audio sample input") } - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: true) } func process(movieFrame frame:CMSampleBuffer) { @@ -423,9 +424,9 @@ public class MovieInput: ImageSource { } // MARK: - - // MARK: Syncronized encoding + // MARK: Synchronized encoding - func enableSyncronizedEncoding() { + func enableSynchronizedEncoding() { self.synchronizedMovieOutput?.encodingLiveVideo = false self.playAtActualSpeed = false self.loop = false diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index ca5b26c8..06929d67 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -3,18 +3,14 @@ import AVFoundation extension String: Error {} public protocol AudioEncodingTarget { - var 
shouldInvalidateAudioSampleWhenDone: Bool { get set } - func activateAudioTrack() - func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) + func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) } public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let sources = SourceContainer() public let maximumInputs:UInt = 1 - public var shouldInvalidateAudioSampleWhenDone: Bool = false - let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? @@ -40,9 +36,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var pixelBuffer:CVPixelBuffer? = nil var renderFramebuffer:Framebuffer! + var audioSettings:[String:Any]? = nil + var shouldPassthroughAudio:Bool + let movieProcessingContext:OpenGLContext - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, audioSettings:[String:Any]? = nil, shouldPassthroughAudio:Bool = false) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup // Since we cannot access self before calling super, initialize here and not above let movieProcessingContext = OpenGLContext() @@ -59,29 +58,32 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. 
assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) - var localSettings:[String:AnyObject] - if let settings = settings { - localSettings = settings + var localSettings:[String:Any] + if let videoSettings = videoSettings { + localSettings = videoSettings } else { - localSettings = [String:AnyObject]() + localSettings = [String:Any]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? size.width + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? size.height + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
- let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(value:self.size.width), - kCVPixelBufferHeightKey as String:NSNumber(value:self.size.height)] + let sourcePixelBufferAttributesDictionary:[String:Any] = [kCVPixelBufferPixelFormatTypeKey as String:Int32(kCVPixelFormatType_32BGRA), + kCVPixelBufferWidthKey as String:self.size.width, + kCVPixelBufferHeightKey as String:self.size.height] assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) + self.audioSettings = audioSettings + self.shouldPassthroughAudio = shouldPassthroughAudio + self.movieProcessingContext = movieProcessingContext } @@ -141,15 +143,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { movieProcessingContext.runOperationAsynchronously{ - guard self.isRecording else { return } - guard !self.isFinishing else { return } + guard self.isRecording, + !self.isFinishing, + self.assetWriter.status == .writing else { + completionCallback?() + return + } self.finishRecordingCompletionCallback = completionCallback - - if (self.assetWriter.status != .writing) { - completionCallback?() - return - } self.finishAudioWriting() @@ -158,7 +159,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Video will finish once a there is a frame time that is later than the last recorded audio buffer time self.isFinishing = true - // Call finishVideoWriting again just incase we don't recieve any additional buffers + // Call finishVideoWriting just incase we don't recieve any additional audio buffers self.movieProcessingContext.serialDispatchQueue.asyncAfter(deadline: .now() + 0.1) { self.finishVideoWriting() } @@ -176,18 +177,18 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.isFinishing = false self.isRecording = false - if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { + if (self.assetWriter.status == .writing && !self.videoEncodingIsFinished) { self.videoEncodingIsFinished = true self.assetWriterVideoInput.markAsFinished() } - self.assetWriter.finishWriting{ + self.assetWriter.finishWriting { self.finishRecordingCompletionCallback?() } } private func finishAudioWriting() { - if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { + if (self.assetWriter.status == .writing && !self.audioEncodingIsFinished) { self.audioEncodingIsFinished = true self.assetWriterAudioInput?.markAsFinished() } @@ -203,10 +204,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { !self.videoEncodingIsFinished else { return } // Ignore still images and other non-video updates (do I still need this?) 
- guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { - return - - } + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } // Check if we are finishing and if this frame is later than the last recorded audio buffer // Note: isFinishing is only set when there is an audio buffer, otherwise the video is finished immediately @@ -219,9 +217,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != self.previousFrameTime) else { - return - } + guard (frameTime != self.previousFrameTime) else { return } if (self.startTime == nil) { self.assetWriter.startSession(atSourceTime: frameTime) @@ -250,7 +246,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - if(synchronizedEncodingDebug) { print("Process frame output") } + if(synchronizedEncodingDebug && !self.encodingLiveVideo) { print("Process frame output") } if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { debugPrint("Problem appending pixel buffer at time: \(frameTime)") @@ -288,25 +284,43 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Audio support public func activateAudioTrack() { - // TODO: Add ability to set custom output settings - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) + var settings:[String:Any]? 
= nil + if let audioSettings = self.audioSettings { + settings = audioSettings + } + else { + var acl = AudioChannelLayout() + memset(&acl, 0, MemoryLayout.size) + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo + + settings = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:64000 + ] + } + + if(shouldPassthroughAudio) { settings = nil } + + assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:settings) assetWriter.add(assetWriterAudioInput!) assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { - guard let assetWriterAudioInput = assetWriterAudioInput else { return } - + public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let work = { defer { - if(self.shouldInvalidateAudioSampleWhenDone) { + if(shouldInvalidateSampleWhenDone) { CMSampleBufferInvalidate(sampleBuffer) } } guard self.isRecording, self.assetWriter.status == .writing, - !self.audioEncodingIsFinished else { return } + !self.audioEncodingIsFinished, + let assetWriterAudioInput = self.assetWriterAudioInput else { return } let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) @@ -335,7 +349,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { usleep(100000) } - if(synchronizedEncodingDebug) { print("Process audio sample output") } + if(synchronizedEncodingDebug && !self.encodingLiveVideo) { print("Process audio sample output") } if (!assetWriterAudioInput.append(sampleBuffer)) { print("Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 51f451b9..12547c4a 100755 --- 
a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -15,6 +15,8 @@ public class RenderView:UIView, ImageConsumer { public var orientation:ImageOrientation = .portrait public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} + public var shouldPresentWithTransaction = false + public let sources = SourceContainer() public let maximumInputs:UInt = 1 var displayFramebuffer:GLuint? @@ -94,6 +96,17 @@ public class RenderView:UIView, ImageConsumer { displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) + // Without the flush I occasionally get a warning from UIKit on the camera renderView and + // when the warning comes in the renderView just stays black. This happens rarely but often enough to be a problem. + // I tried a transaction and it doesn't silence it and this is likely why --> http://danielkbx.com/post/108060601989/catransaction-flush + // This flush defeats the purpose of presentWithTransaction() so it should only be enabled when you need it. + // The idea with presentWithTransaction() is to be able to change the bounds of this renderView, then draw contents into it + // at the correct bounds without any blips in between. If you have this flush() in place it will force a layout pass in the middle of that + // causing the old contents to be briefly distorted while the new contents are yet to be drawn. + // That is why this shouldn't be used in media playback scenarios. + if(!shouldPresentWithTransaction) { + CATransaction.flush() + } sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) var backingWidth:GLint = 0 @@ -130,7 +143,9 @@ public class RenderView:UIView, ImageConsumer { // as soon as presentBufferForDisplay() is called. 
// Source --> https://stackoverflow.com/a/30722276/1275014 // Source --> https://developer.apple.com/documentation/quartzcore/caeagllayer/1618676-presentswithtransaction - self.presentWithTransaction() + if(shouldPresentWithTransaction) { + self.presentWithTransaction() + } return true } From 49f6ea52aac3c318db46f155958ea077c9d6d946 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 28 Feb 2018 00:18:39 -0800 Subject: [PATCH 043/332] Resolve dropped frames at the beginning of recording, videos sometimes ending 2-3 seconds early, move all audio logic to client side --- framework/Source/iOS/MovieInput.swift | 7 +- framework/Source/iOS/MovieOutput.swift | 128 +++++++++++-------------- 2 files changed, 61 insertions(+), 74 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 36aaea69..c02d9065 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -56,15 +56,18 @@ public class MovieInput: ImageSource { var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 + var audioSettings:[String:Any]? + var movieFramebuffer:Framebuffer? // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? 
= nil) throws { self.asset = asset self.videoComposition = videoComposition self.playAtActualSpeed = playAtActualSpeed self.loop = loop self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} + self.audioSettings = audioSettings } public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { @@ -138,7 +141,7 @@ public class MovieInput: ImageSource { if let audioTrack = self.asset.tracks(withMediaType: AVMediaTypeAudio).first, let _ = self.audioEncodingTarget { - let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) + let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings) readerAudioTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerAudioTrackOutput) } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 06929d67..77622968 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -37,11 +37,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var renderFramebuffer:Framebuffer! var audioSettings:[String:Any]? = nil - var shouldPassthroughAudio:Bool let movieProcessingContext:OpenGLContext - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, audioSettings:[String:Any]? = nil, shouldPassthroughAudio:Bool = false) throws { + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, audioSettings:[String:Any]? 
= nil) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup // Since we cannot access self before calling super, initialize here and not above let movieProcessingContext = OpenGLContext() @@ -55,8 +54,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.size = size assetWriter = try AVAssetWriter(url:URL, fileType:fileType) - // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. - assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) + // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last 1/4 second should be lost in that case. + assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(0.25, 1000) var localSettings:[String:Any] if let videoSettings = videoSettings { @@ -82,7 +81,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriter.add(assetWriterVideoInput) self.audioSettings = audioSettings - self.shouldPassthroughAudio = shouldPassthroughAudio self.movieProcessingContext = movieProcessingContext } @@ -90,19 +88,23 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { startTime = nil - movieProcessingContext.runOperationAsynchronously { + // Don't do this work on the movieProcessingContext que so we don't block it. 
+ // If it does get blocked framebuffers will pile up and after it is no longer blocked/this work has finished + // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped + DispatchQueue.global(qos: .utility).async { do { + var success = false try NSObject.catchException { - self.isRecording = self.assetWriter.startWriting() + success = self.assetWriter.startWriting() } - if(!self.isRecording) { + if(!success) { throw "Could not start asset writer: \(String(describing: self.assetWriter.error))" } guard let pixelBufferPool = self.assetWriterPixelBufferInput.pixelBufferPool else { - //When the pixelBufferPool returns nil, check the following: - //https://stackoverflow.com/a/20110179/1275014 + // When the pixelBufferPool returns nil, check the following: + // https://stackoverflow.com/a/20110179/1275014 throw "Pixel buffer pool was nil" } @@ -121,19 +123,23 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) - let bufferSize = GLSize(self.size) - var cachedTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) - let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
- - self.renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) - - completionCallback?(true) + // This work must be done on the movieProcessingContext since we access openGL + try self.movieProcessingContext.runOperationSynchronously { + let bufferSize = GLSize(self.size) + var cachedTextureRef:CVOpenGLESTexture? = nil + let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) + + self.renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + + self.isRecording = true + + completionCallback?(true) + } } catch { print("Unable to start recording: \(error)") self.assetWriter.cancelWriting() - self.isRecording = false completionCallback?(false) } @@ -151,55 +157,54 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } self.finishRecordingCompletionCallback = completionCallback - - self.finishAudioWriting() - // Check if there was audio - if(self.previousAudioTime != nil) { - // Video will finish once a there is a frame time that is later than the last recorded audio buffer time - self.isFinishing = true + self.audioEncodingIsFinished = true + + // Check that there was audio and that this is live video + if let previousAudioTime = self.previousAudioTime, + self.encodingLiveVideo { - // Call finishVideoWriting just incase we don't recieve any additional audio buffers - self.movieProcessingContext.serialDispatchQueue.asyncAfter(deadline: .now() + 0.1) { - self.finishVideoWriting() + // Check if the last frame is later than the last recorded audio buffer + if let 
previousFrameTime = self.previousFrameTime, + CMTimeCompare(previousAudioTime, previousFrameTime) == -1 { + // Finish immediately + self.finishWriting() + } + else { + // Video will finish once a there is a frame time that is later than the last recorded audio buffer time + self.isFinishing = true + + // Finish after a delay just incase we don't recieve any additional audio buffers + self.movieProcessingContext.serialDispatchQueue.asyncAfter(deadline: .now() + 0.1) { + self.finishWriting() + } } } else { - // We can finish immediately since there is no audio - self.finishVideoWriting() + // Finish immediately since there is no audio + self.finishWriting() } } } - private func finishVideoWriting() { + private func finishWriting() { guard self.isRecording else { return } self.isFinishing = false self.isRecording = false - if (self.assetWriter.status == .writing && !self.videoEncodingIsFinished) { - self.videoEncodingIsFinished = true - self.assetWriterVideoInput.markAsFinished() - } + self.videoEncodingIsFinished = true self.assetWriter.finishWriting { self.finishRecordingCompletionCallback?() } } - private func finishAudioWriting() { - if (self.assetWriter.status == .writing && !self.audioEncodingIsFinished) { - self.audioEncodingIsFinished = true - self.assetWriterAudioInput?.markAsFinished() - } - } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); movieProcessingContext.runOperationAsynchronously { - guard self.renderFramebuffer != nil, - self.isRecording, + guard self.isRecording, self.assetWriter.status == .writing, !self.videoEncodingIsFinished else { return } @@ -211,8 +216,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if self.isFinishing, let previousAudioTime = self.previousAudioTime, CMTimeCompare(previousAudioTime, frameTime) == -1 { - // Finish recording - self.finishVideoWriting() + self.finishWriting() return } @@ -227,7 +231,7 @@ public class MovieOutput: ImageConsumer, 
AudioEncodingTarget { self.previousFrameTime = frameTime - guard (self.assetWriterVideoInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard (self.assetWriterVideoInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { debugPrint("Had to drop a frame at time \(frameTime)") return } @@ -284,27 +288,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Audio support public func activateAudioTrack() { - var settings:[String:Any]? = nil - if let audioSettings = self.audioSettings { - settings = audioSettings - } - else { - var acl = AudioChannelLayout() - memset(&acl, 0, MemoryLayout.size) - acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo - - settings = [ - AVFormatIDKey:kAudioFormatMPEG4AAC, - AVNumberOfChannelsKey:2, - AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, - AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), - AVEncoderBitRateKey:64000 - ] - } - - if(shouldPassthroughAudio) { settings = nil } - - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:settings) + assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:self.audioSettings) assetWriter.add(assetWriterAudioInput!) 
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } @@ -325,14 +309,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) if let firstFrameTime = self.firstFrameTime { - // Check if the time of this audio sample is before the time of the first frame - // If so then ignore it + // If the time of this audio sample is before the time of the first frame ignore it if (CMTimeCompare(currentSampleTime, firstFrameTime) == -1) { return } } else { - // We have not recorded any video yet, so we do not know if this audio sample + // We have not recorded any video yet so we do not know if this audio sample // falls before or after the time of the first frame which has not yet come in. // There may be a better solution for this case return @@ -340,7 +323,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.previousAudioTime = currentSampleTime - guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard (assetWriterAudioInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { + debugPrint("Had to drop a audio sample at time \(currentSampleTime)") return } From e8d354434b2f56c91055f5da9ad0f883311b4513 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Wed, 28 Feb 2018 01:47:41 -0800 Subject: [PATCH 044/332] Simpler black frame handling --- framework/Source/iOS/MovieOutput.swift | 85 ++++---------------------- 1 file changed, 12 insertions(+), 73 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 77622968..3cf99258 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -19,14 +19,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let size:Size let colorSwizzlingShader:ShaderProgram private var isRecording = false - private var isFinishing = false - private var 
finishRecordingCompletionCallback:(() -> Void)? = nil var videoEncodingIsFinished = false var audioEncodingIsFinished = false - private var startTime:CMTime? - private var firstFrameTime: CMTime? private var previousFrameTime: CMTime? - private var previousAudioTime: CMTime? var encodingLiveVideo:Bool { didSet { assetWriterVideoInput.expectsMediaDataInRealTime = encodingLiveVideo @@ -86,8 +81,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { - startTime = nil - // Don't do this work on the movieProcessingContext que so we don't block it. // If it does get blocked framebuffers will pile up and after it is no longer blocked/this work has finished // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped @@ -144,62 +137,33 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { completionCallback?(false) } } - } public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { movieProcessingContext.runOperationAsynchronously{ guard self.isRecording, - !self.isFinishing, self.assetWriter.status == .writing else { completionCallback?() return } - self.finishRecordingCompletionCallback = completionCallback - self.audioEncodingIsFinished = true + self.videoEncodingIsFinished = true - // Check that there was audio and that this is live video - if let previousAudioTime = self.previousAudioTime, - self.encodingLiveVideo { - - // Check if the last frame is later than the last recorded audio buffer - if let previousFrameTime = self.previousFrameTime, - CMTimeCompare(previousAudioTime, previousFrameTime) == -1 { - // Finish immediately - self.finishWriting() - } - else { - // Video will finish once a there is a frame time that is later than the last recorded audio buffer time - self.isFinishing = true - - // Finish after a delay just incase we don't recieve any additional audio buffers - self.movieProcessingContext.serialDispatchQueue.asyncAfter(deadline: .now() + 0.1) { - self.finishWriting() - } - } + self.isRecording = false + + if let lastFrame = self.previousFrameTime { + // Resolve black frames at the end. 
If we only call finishWriting() the session's effective end time + // will be the latest end timestamp of the session's samples which could be either video or audio + self.assetWriter.endSession(atSourceTime: lastFrame) } - else { - // Finish immediately since there is no audio - self.finishWriting() + + self.assetWriter.finishWriting { + completionCallback?() } } } - private func finishWriting() { - guard self.isRecording else { return } - - self.isFinishing = false - self.isRecording = false - - self.videoEncodingIsFinished = true - - self.assetWriter.finishWriting { - self.finishRecordingCompletionCallback?() - } - } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); @@ -211,22 +175,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Ignore still images and other non-video updates (do I still need this?) guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - // Check if we are finishing and if this frame is later than the last recorded audio buffer - // Note: isFinishing is only set when there is an audio buffer, otherwise the video is finished immediately - if self.isFinishing, - let previousAudioTime = self.previousAudioTime, - CMTimeCompare(previousAudioTime, frameTime) == -1 { - self.finishWriting() - return - } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case guard (frameTime != self.previousFrameTime) else { return } - if (self.startTime == nil) { + if (self.previousFrameTime == nil) { + // This resolves black frames at the beginning. 
Any samples recieved before this time will be edited out self.assetWriter.startSession(atSourceTime: frameTime) - self.startTime = frameTime - self.firstFrameTime = frameTime } self.previousFrameTime = frameTime @@ -308,21 +262,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if let firstFrameTime = self.firstFrameTime { - // If the time of this audio sample is before the time of the first frame ignore it - if (CMTimeCompare(currentSampleTime, firstFrameTime) == -1) { - return - } - } - else { - // We have not recorded any video yet so we do not know if this audio sample - // falls before or after the time of the first frame which has not yet come in. - // There may be a better solution for this case - return - } - - self.previousAudioTime = currentSampleTime - guard (assetWriterAudioInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { debugPrint("Had to drop a audio sample at time \(currentSampleTime)") return From b76549a2da57d5733e16f49d8b47190dfa010e41 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 1 Mar 2018 19:16:38 -0800 Subject: [PATCH 045/332] SpeakerOutput support via sample buffers, further improved playback timing algortihm, optional mirror front facing camera --- framework/GPUImage.xcodeproj/project.pbxproj | 24 ++ framework/Source/GPUImage-Bridging-Header.h | 1 + framework/Source/iOS/Camera.swift | 17 +- framework/Source/iOS/MovieInput.swift | 84 +++-- framework/Source/iOS/MovieOutput.swift | 10 +- framework/Source/iOS/OpenGLContext.swift | 2 +- framework/Source/iOS/RenderView.swift | 3 +- framework/SpeakerOutput.swift | 318 +++++++++++++++++++ framework/TPCircularBuffer.h | 243 ++++++++++++++ framework/TPCircularBuffer.m | 149 +++++++++ 10 files changed, 793 insertions(+), 58 deletions(-) create mode 100644 framework/SpeakerOutput.swift create mode 100755 framework/TPCircularBuffer.h create mode 100755 
framework/TPCircularBuffer.m diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 0b0c9869..94200a32 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -11,6 +11,13 @@ 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; + 1F6D1CAB2048F79C00317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */; }; + 1F6D1CAC2048F79C00317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */; }; + 1F6D1CAD2048F79C00317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */; }; + 1F6D1CAE2048F79C00317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */; }; + 1F6D1CB12048F7BC00317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CAF2048F7BB00317B5F /* SpeakerOutput.swift */; }; + 1F6D1CB32048F81D00317B5F /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */; }; + 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; 
BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -377,6 +384,11 @@ 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSObject+Exception.m"; path = "Source/NSObject+Exception.m"; sourceTree = ""; }; 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSObject+Exception.h"; path = "Source/NSObject+Exception.h"; sourceTree = ""; }; 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImage-Bridging-Header.h"; path = "Source/GPUImage-Bridging-Header.h"; sourceTree = ""; }; + 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TPCircularBuffer.h; sourceTree = ""; }; + 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TPCircularBuffer.m; sourceTree = ""; }; + 1F6D1CAF2048F7BB00317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SpeakerOutput.swift; sourceTree = ""; }; + 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AudioToolbox.framework; sourceTree = DEVELOPER_DIR; }; + 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = 
Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -717,6 +729,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */, + 1F6D1CB32048F81D00317B5F /* AudioToolbox.framework in Frameworks */, BC9E35021E524BE200B8604F /* OpenGLES.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -970,6 +984,8 @@ children = ( 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */, 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */, + 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */, + 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */, BC4C85ED1C9F042900FD95D8 /* ConvertedShaders_GL.swift */, BC9E35531E52521F00B8604F /* ConvertedShaders_GLES.swift */, 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */, @@ -1012,6 +1028,8 @@ BC6E7CCB1C39ADDD006DF678 /* Frameworks */ = { isa = PBXGroup; children = ( + 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */, + 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */, BC9E35011E524BE200B8604F /* OpenGLES.framework */, BC6E7CC91C39ADCC006DF678 /* OpenGL.framework */, ); @@ -1133,6 +1151,7 @@ BC9E35201E524D2A00B8604F /* iOS */ = { isa = PBXGroup; children = ( 
+ 1F6D1CAF2048F7BB00317B5F /* SpeakerOutput.swift */, BC9E35231E524D4D00B8604F /* RenderView.swift */, BC9E35221E524D4D00B8604F /* PictureOutput.swift */, BC9E35211E524D4D00B8604F /* MovieOutput.swift */, @@ -1209,6 +1228,7 @@ buildActionMask = 2147483647; files = ( 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, + 1F6D1CAB2048F79C00317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1217,6 +1237,7 @@ buildActionMask = 2147483647; files = ( 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, + 1F6D1CAC2048F79C00317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1460,6 +1481,7 @@ BCFF46C01CB9556B00A0C521 /* WhiteBalance.swift in Sources */, BC7FD14E1CB0BD3900037949 /* ZoomBlur.swift in Sources */, BCFB07921CBF37A1009B2333 /* TextureInput.swift in Sources */, + 1F6D1CAD2048F79C00317B5F /* TPCircularBuffer.m in Sources */, BC6E7CC71C39AD9E006DF678 /* ShaderProgram.swift in Sources */, BCFF46CA1CB96BD700A0C521 /* HighPassFilter.swift in Sources */, BC7FD1321CB0A57F00037949 /* HighlightsAndShadows.swift in Sources */, @@ -1644,6 +1666,7 @@ BC9E35511E52518F00B8604F /* Timestamp.swift in Sources */, BC9E35781E5256EB00B8604F /* ColorMatrixFilter.swift in Sources */, BC9E35D11E52580400B8604F /* ScreenBlend.swift in Sources */, + 1F6D1CAE2048F79C00317B5F /* TPCircularBuffer.m in Sources */, BC9E356A1E5256C200B8604F /* Haze.swift in Sources */, BC9E35D31E52580A00B8604F /* SourceOverBlend.swift in Sources */, BC9E357E1E5256FE00B8604F /* Vibrance.swift in Sources */, @@ -1651,6 +1674,7 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* HighlightsAndShadows.swift in Sources */, + 1F6D1CB12048F7BC00317B5F /* SpeakerOutput.swift in Sources */, 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in 
Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, diff --git a/framework/Source/GPUImage-Bridging-Header.h b/framework/Source/GPUImage-Bridging-Header.h index 3ba6a8ce..379e1ac3 100644 --- a/framework/Source/GPUImage-Bridging-Header.h +++ b/framework/Source/GPUImage-Bridging-Header.h @@ -10,5 +10,6 @@ #define GPUImage_Bridging_Header_h #import "NSObject+Exception.h" +#import "TPCircularBuffer.h" #endif /* GPUImage_Bridging_Header_h */ diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 019d7c64..05328bbb 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -7,12 +7,14 @@ public protocol CameraDelegate: class { public enum PhysicalCameraLocation { case backFacing case frontFacing + case frontFacingMirrored // Documentation: "The front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight." func imageOrientation() -> ImageOrientation { switch self { case .backFacing: return .landscapeRight case .frontFacing: return .landscapeLeft + case .frontFacingMirrored: return .landscapeLeft } } @@ -20,6 +22,7 @@ public enum PhysicalCameraLocation { switch self { case .backFacing: return .back case .frontFacing: return .front + case .frontFacingMirrored: return .front } } @@ -50,7 +53,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var logFPS:Bool = false public var audioEncodingTarget:AudioEncodingTarget? { didSet { - guard var audioEncodingTarget = audioEncodingTarget else { + guard let audioEncodingTarget = audioEncodingTarget else { // Removing the audio inputs and outputs causes a black flash on the video output //self.removeAudioInputsAndOutputs() return @@ -68,18 +71,18 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public weak var delegate: CameraDelegate? 
public let captureSession:AVCaptureSession public let inputCamera:AVCaptureDevice! - let videoInput:AVCaptureDeviceInput! + public let videoInput:AVCaptureDeviceInput! public let videoOutput:AVCaptureVideoDataOutput! - var microphone:AVCaptureDevice? - var audioInput:AVCaptureDeviceInput? + public var microphone:AVCaptureDevice? + public var audioInput:AVCaptureDeviceInput? public var audioOutput:AVCaptureAudioDataOutput? var supportsFullYUVRange:Bool = false let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default) - let audioProcessingQueue = DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default) + let cameraProcessingQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", qos: .default) + let audioProcessingQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.audioProcessingQueue", qos: .default) let framesToIgnore = 5 var numberOfFramesCaptured = 0 @@ -158,7 +161,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if let connections = videoOutput.connections as? [AVCaptureConnection] { for connection in connections { if(connection.isVideoMirroringSupported) { - connection.isVideoMirrored = (location == .frontFacing) + connection.isVideoMirrored = (location == .frontFacingMirrored) } } } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index c02d9065..afbb2f27 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -14,7 +14,7 @@ public class MovieInput: ImageSource { public var audioEncodingTarget:AudioEncodingTarget? 
{ didSet { - guard var audioEncodingTarget = audioEncodingTarget else { + guard let audioEncodingTarget = audioEncodingTarget else { return } audioEncodingTarget.activateAudioTrack() @@ -28,15 +28,12 @@ public class MovieInput: ImageSource { let asset:AVAsset let videoComposition:AVVideoComposition? var playAtActualSpeed:Bool - var startFrameTime:CMTime? - var lastFrameTime:CMTime? + var requestedStartTime:CMTime? + var actualStartTime:DispatchTime? + private(set) public var currentTime:CMTime? public var loop:Bool - public var currentFrameTime:CMTime? { - get { - return self.lastFrameTime - } - } + public var completion: (() -> Void)? public var synchronizedMovieOutput:MovieOutput? { @@ -88,7 +85,7 @@ public class MovieInput: ImageSource { // MARK: Playback control public func start(atTime: CMTime) { - self.startFrameTime = atTime + self.requestedStartTime = atTime self.start() } @@ -113,7 +110,7 @@ public class MovieInput: ImageSource { public func pause() { self.cancel() - self.startFrameTime = self.lastFrameTime + self.requestedStartTime = self.currentTime } // MARK: - @@ -146,11 +143,12 @@ public class MovieInput: ImageSource { assetReader.add(readerAudioTrackOutput) } - if let startFrameTime = self.startFrameTime { - assetReader.timeRange = CMTimeRange(start: startFrameTime, duration: kCMTimePositiveInfinity) + if let requestedStartTime = self.requestedStartTime { + assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) } - self.startFrameTime = nil - self.lastFrameTime = nil + self.requestedStartTime = nil + self.currentTime = nil + self.actualStartTime = nil return assetReader } catch { @@ -168,10 +166,11 @@ public class MovieInput: ImageSource { self.configureThread() } else if(playAtActualSpeed) { - thread.qualityOfService = .userInteractive + thread.qualityOfService = .userInitiated } else { - thread.qualityOfService = .default // Synchronized encoding + // This includes syncronized encoding since the above vars 
will be disabled for it + thread.qualityOfService = .default } guard let assetReader = self.createReader() else { @@ -224,7 +223,7 @@ public class MovieInput: ImageSource { } else { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) - if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } + if let readerAudioTrackOutput = readerAudioTrackOutput, self.audioEncodingTarget?.readyForNextAudioBuffer() ?? true { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } } } @@ -251,53 +250,43 @@ public class MovieInput: ImageSource { guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { movieOutput.movieProcessingContext.runOperationAsynchronously { - // Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done appending buffers to it. - // This is necessary to prevent other inputs from stalling, as they may otherwise wait forever for that input's media data, attempting to complete the ideal interleaving pattern. + // Documentation: "Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done + // appending buffers to it. This is necessary to prevent other inputs from stalling, as they may otherwise wait forever + // for that input's media data, attempting to complete the ideal interleaving pattern." movieOutput.videoEncodingIsFinished = true movieOutput.assetWriterVideoInput.markAsFinished() } } - return } if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process frame input") } - let renderStart = DispatchTime.now() - var frameDurationNanos: Float64 = 0 + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) if (self.playAtActualSpeed) { - // Sample time e.g. 
first frame is 0,30 second frame is 1,30 - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - // Retrieve the rolling frame rate (duration between each frame) - let frameDuration = CMTimeSubtract(currentSampleTime, self.lastFrameTime ?? CMTimeAdd(currentSampleTime, CMTime(value: 1, timescale: 30))) - frameDurationNanos = CMTimeGetSeconds(frameDuration) * 1_000_000_000 + let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000) + let currentActualTime = DispatchTime.now() - self.lastFrameTime = currentSampleTime + if(self.actualStartTime == nil) { self.actualStartTime = currentActualTime } + + // Determine how much time we need to wait in order to catch up to the current time relative to the start + // We are forcing the samples to adhear to their own sample times. + let delay = currentSampleTimeNanoseconds - (currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) + + //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") + + if(delay > 0) { + mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(delay))) + } } + self.currentTime = currentSampleTime + sharedImageProcessingContext.runOperationSynchronously{ self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) } - - if(self.playAtActualSpeed) { - let renderEnd = DispatchTime.now() - // Find the amount of time it took to display the last frame - let renderDurationNanos = Double(renderEnd.uptimeNanoseconds - renderStart.uptimeNanoseconds) - // Find how much time we should wait to display the next frame. So it would be the frame duration minus the - // amount of time we already spent rendering the current frame. 
- let waitDurationNanos = Int(frameDurationNanos - renderDurationNanos) - - // When the wait duration begins returning negative values consistently - // It means the OS is unable to provide enough processing time for the above work - // and that you need to adjust the real time thread policy below - //print("Render duration: \(String(format: "%.4f",renderDurationNanos / 1_000_000)) ms Wait duration: \(String(format: "%.4f",Double(waitDurationNanos) / 1_000_000)) ms") - - if(waitDurationNanos > 0) { - mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(waitDurationNanos))) - } - } } func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { @@ -308,7 +297,6 @@ public class MovieInput: ImageSource { movieOutput.assetWriterAudioInput?.markAsFinished() } } - return } @@ -418,7 +406,7 @@ public class MovieInput: ImageSource { // Not needed for movie inputs } - public func reprocessLastFrame() { + public func transmitPreviousFrame() { sharedImageProcessingContext.runOperationAsynchronously { if let movieFramebuffer = self.movieFramebuffer { self.updateTargetsWithFramebuffer(movieFramebuffer) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 3cf99258..c9a47a9a 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -2,12 +2,15 @@ import AVFoundation extension String: Error {} -public protocol AudioEncodingTarget { +@objc public protocol AudioEncodingTarget { func activateAudioTrack() func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) + // Note: This is not used for synchronized encoding. 
+ func readyForNextAudioBuffer() -> Bool } public class MovieOutput: ImageConsumer, AudioEncodingTarget { + public let sources = SourceContainer() public let maximumInputs:UInt = 1 @@ -286,6 +289,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { work() } } + + // Note: This is not used for synchronized encoding. + public func readyForNextAudioBuffer() -> Bool { + return true + } } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index b8744496..dec61982 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -27,7 +27,7 @@ public class OpenGLContext: SerialDispatch { }() - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", qos: .userInteractive) + public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", qos: .userInitiated) public let dispatchQueueKey = DispatchSpecificKey() public let dispatchQueueKeyValue: Int diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 12547c4a..96511d6a 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -16,6 +16,7 @@ public class RenderView:UIView, ImageConsumer { public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} public var shouldPresentWithTransaction = false + public var waitsForTransaction = true public let sources = SourceContainer() public let maximumInputs:UInt = 1 @@ -175,7 +176,7 @@ public class RenderView:UIView, ImageConsumer { let processFramebuffer = { // Don't bog down UIKIt with a bunch of framebuffers if we are waiting for a transaction to complete // otherwise we will block the main thread as it trys to catch up. 
- if (self.waitingForTransaction) { return } + if (self.waitingForTransaction && self.waitsForTransaction) { return } self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) diff --git a/framework/SpeakerOutput.swift b/framework/SpeakerOutput.swift new file mode 100644 index 00000000..03f15df5 --- /dev/null +++ b/framework/SpeakerOutput.swift @@ -0,0 +1,318 @@ +// +// SpeakerOutput.swift +// GPUImage +// +// Rewritten by Josh Bernfeld on 3/1/18 +// and originally created by Uzi Refaeli on 3/9/13. +// Copyright (c) 2018 Brad Larson. All rights reserved. +// + +import Foundation +import AudioToolbox +import AVFoundation + +class SpeakerOutput: AudioEncodingTarget { + + public var changesAudioSession = true + + var isPlaying = false + var hasBuffer = false + var isReadyForMoreMediaData = true { + willSet { + guard newValue else { return } + + // When we are ready to begin accepting new data check if we had something + // in the rescue buffer. If we did then move it to the main buffer. + self.copyRescueBufferContentsToCircularBuffer() + } + } + + var processingGraph:AUGraph? + var mixerUnit:AudioUnit? + + var firstBufferReached = false + + let outputBus:AudioUnitElement = 0 + let inputBus:AudioUnitElement = 1 + + let unitSize = UInt32(MemoryLayout.size) + let bufferUnit:UInt32 = 655360 + + var circularBuffer = TPCircularBuffer() + let circularBufferSize:UInt32 + + var rescueBuffer:UnsafeMutableRawPointer? + let rescueBufferSize:Int + var rescueBufferContentsSize:UInt32 = 0 + + + init() { + circularBufferSize = bufferUnit * unitSize + rescueBufferSize = Int(bufferUnit / 2) + } + + deinit { + if let processingGraph = processingGraph { + DisposeAUGraph(processingGraph) + } + if let rescueBuffer = rescueBuffer { + free(rescueBuffer) + } + TPCircularBufferCleanup(&circularBuffer) + + self.stop() + } + + // MARK: - + // MARK: Playback control + + public func start() { + AUGraphStart(processingGraph!) 
+ + isPlaying = true + } + + public func stop() { + AUGraphStop(processingGraph!) + + isPlaying = false + } + + // MARK: - + // MARK: AudioEncodingTarget protocol + + func activateAudioTrack() { + if(changesAudioSession) { + do { + try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient) + try AVAudioSession.sharedInstance().setActive(true) + } + catch { + print("ERROR: Unable to set audio session: \(error)") + } + } + + // Create a new AUGraph + NewAUGraph(&processingGraph) + + // AUNodes represent AudioUnits on the AUGraph and provide an + // easy means for connecting audioUnits together. + var outputNode = AUNode() + var mixerNode = AUNode() + + // Create AudioComponentDescriptions for the AUs we want in the graph mixer component + var mixerDesc = AudioComponentDescription() + mixerDesc.componentType = kAudioUnitType_Mixer + mixerDesc.componentSubType = kAudioUnitSubType_SpatialMixer + mixerDesc.componentFlags = 0 + mixerDesc.componentFlagsMask = 0 + mixerDesc.componentManufacturer = kAudioUnitManufacturer_Apple + + // Output component + var outputDesc = AudioComponentDescription() + outputDesc.componentType = kAudioUnitType_Output + outputDesc.componentSubType = kAudioUnitSubType_RemoteIO + outputDesc.componentFlags = 0 + outputDesc.componentFlagsMask = 0 + outputDesc.componentManufacturer = kAudioUnitManufacturer_Apple + + // Add nodes to the graph to hold our AudioUnits, + // You pass in a reference to the AudioComponentDescription + // and get back an AudioUnit + AUGraphAddNode(processingGraph!, &mixerDesc, &mixerNode) + AUGraphAddNode(processingGraph!, &outputDesc, &outputNode) + + // Now we can manage connections using nodes in the graph. + // Connect the mixer node's output to the output node's input + AUGraphConnectNodeInput(processingGraph!, mixerNode, 0, outputNode, 0) + + // Upon return from this function call, the audio units belonging to the graph are open but not initialized. Specifically, no resource allocation occurs. 
+ AUGraphOpen(processingGraph!) + + // Get a link to the mixer AU so we can talk to it later + AUGraphNodeInfo(processingGraph!, mixerNode, nil, &mixerUnit) + + var elementCount:UInt32 = 1 + AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, UInt32(MemoryLayout.size)) + + // Set output callback, this is how audio sample data will be retrieved + var callbackStruct = AURenderCallbackStruct() + callbackStruct.inputProc = playbackCallback + callbackStruct.inputProcRefCon = bridgeObject(self) + AUGraphSetNodeInputCallback(processingGraph!, mixerNode, 0, &callbackStruct) + + // Describe the format, this will get adjusted when the first sample comes in. + var audioFormat = AudioStreamBasicDescription() + audioFormat.mFormatID = kAudioFormatLinearPCM + audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked + audioFormat.mSampleRate = 44100.0 + audioFormat.mReserved = 0 + + audioFormat.mBytesPerPacket = 2 + audioFormat.mFramesPerPacket = 1 + audioFormat.mBytesPerFrame = 2 + audioFormat.mChannelsPerFrame = 1 + audioFormat.mBitsPerChannel = 16 + + // Apply the format + AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, UInt32(MemoryLayout.size)) + + // Initialize the processing graph + AUGraphInitialize(processingGraph!) 
+ + circularBuffer = TPCircularBuffer() + + // Initialize the circular buffer + _TPCircularBufferInit(&circularBuffer, circularBufferSize, MemoryLayout.size) + + hasBuffer = false + } + + func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + defer { + if(shouldInvalidateSampleWhenDone) { + CMSampleBufferInvalidate(sampleBuffer) + } + } + + if(!isReadyForMoreMediaData || !isPlaying) { return } + + if(!firstBufferReached) { + firstBufferReached = true + // Get the format information of the sample + let desc = CMSampleBufferGetFormatDescription(sampleBuffer)! + let basicDesc = CMAudioFormatDescriptionGetStreamBasicDescription(desc)! + + var oSize = UInt32(MemoryLayout.size) + // Retrieve the existing set audio format + var audioFormat = AudioStreamBasicDescription() + AudioUnitGetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, &oSize) + + // Update the audio format with the information we have from the sample + audioFormat.mSampleRate = basicDesc.pointee.mSampleRate + + audioFormat.mBytesPerPacket = basicDesc.pointee.mBytesPerPacket + audioFormat.mFramesPerPacket = basicDesc.pointee.mFramesPerPacket + audioFormat.mBytesPerFrame = basicDesc.pointee.mBytesPerFrame + audioFormat.mChannelsPerFrame = basicDesc.pointee.mChannelsPerFrame + audioFormat.mBitsPerChannel = basicDesc.pointee.mBitsPerChannel + + // Apply the format + AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, UInt32(MemoryLayout.size)) + AUGraphUpdate(processingGraph!, nil) + } + + // Populate an AudioBufferList with the sample + var audioBufferList = AudioBufferList() + var blockBuffer:CMBlockBuffer? 
+ CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, nil, &audioBufferList, MemoryLayout.size, nil, nil, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer) + + // This is actually doing audioBufferList.mBuffers[0] + // Since the struct has an array of length of 1 the compiler is interpreting + // it as a single item array and not letting us use the above line. + // Since the array pointer points to the first item of the c array this is equally fine. + let audioBuffer = audioBufferList.mBuffers + + // Place the AudioBufferList in the circular buffer + let sampleSize = UInt32(CMSampleBufferGetTotalSampleSize(sampleBuffer)) + let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, audioBuffer.mData, sampleSize) + + // The circular buffer has not been proceseed quickly enough and has filled up. + // Disable reading any further samples and save this last buffer so we don't lose it. + if(!didCopyBytes) { + //print("TPCircularBuffer limit reached: \(sampleSize) Bytes") + + isReadyForMoreMediaData = false + + self.writeToRescueBuffer(audioBuffer.mData, sampleSize) + } + else { + hasBuffer = true + } + } + + func readyForNextAudioBuffer() -> Bool { + return isReadyForMoreMediaData + } + + // MARK: - + // MARK: Rescue buffer + + func writeToRescueBuffer(_ src: UnsafeRawPointer!, _ size: UInt32) { + if(rescueBufferContentsSize > 0) { + print("WARNING: Writing to rescue buffer with contents already inside") + } + + if(size > rescueBufferSize) { + print("WARNING: Unable to allocate enought space for rescue buffer, dropping audio sample") + } + else { + if(rescueBuffer == nil) { + rescueBuffer = malloc(rescueBufferSize) + } + + rescueBufferContentsSize = size + memcpy(rescueBuffer!, src, Int(size)) + } + } + + func copyRescueBufferContentsToCircularBuffer() { + if(rescueBufferContentsSize > 0) { + let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, rescueBuffer, rescueBufferContentsSize) + if(!didCopyBytes) { + 
print("WARNING: Unable to copy rescue buffer into main buffer, dropping audio sample") + } + rescueBufferContentsSize = 0 + } + } +} + +func playbackCallback( + inRefCon:UnsafeMutableRawPointer, + ioActionFlags:UnsafeMutablePointer, + inTimeStamp:UnsafePointer, + inBusNumber:UInt32, + inNumberFrames:UInt32, + ioData:UnsafeMutablePointer?) -> OSStatus { + + let audioBuffer = ioData!.pointee.mBuffers + let numberOfChannels = audioBuffer.mNumberChannels + let outSamples = audioBuffer.mData + + // Zero-out all of the output samples first + memset(outSamples, 0, Int(audioBuffer.mDataByteSize)) + + let p = bridgeRawPointer(inRefCon) as! SpeakerOutput + + if(p.hasBuffer && p.isPlaying) { + var availableBytes:UInt32 = 0 + let bufferTail = TPCircularBufferTail(&p.circularBuffer, &availableBytes) + + let requestedBytesSize = inNumberFrames * p.unitSize * numberOfChannels + + let bytesToRead = min(availableBytes, requestedBytesSize) + // Copy the bytes from the circular buffer into the outSample + memcpy(outSamples, bufferTail, Int(bytesToRead)) + // Clear what we just read out of the circular buffer + TPCircularBufferConsume(&p.circularBuffer, bytesToRead) + + if(availableBytes <= requestedBytesSize*2) { + p.isReadyForMoreMediaData = true + } + + if(availableBytes <= requestedBytesSize) { + p.hasBuffer = false + } + } + + return noErr +} + +func bridgeObject(_ obj : AnyObject) -> UnsafeMutableRawPointer { + return UnsafeMutableRawPointer(Unmanaged.passUnretained(obj).toOpaque()) +} + +func bridgeRawPointer(_ ptr : UnsafeMutableRawPointer) -> AnyObject { + return Unmanaged.fromOpaque(ptr).takeUnretainedValue() +} + diff --git a/framework/TPCircularBuffer.h b/framework/TPCircularBuffer.h new file mode 100755 index 00000000..88129560 --- /dev/null +++ b/framework/TPCircularBuffer.h @@ -0,0 +1,243 @@ +// +// TPCircularBuffer.h +// Circular/Ring buffer implementation +// +// https://github.com/michaeltyson/TPCircularBuffer +// +// Created by Michael Tyson on 10/12/2011. 
+// +// +// This implementation makes use of a virtual memory mapping technique that inserts a virtual copy +// of the buffer memory directly after the buffer's end, negating the need for any buffer wrap-around +// logic. Clients can simply use the returned memory address as if it were contiguous space. +// +// The implementation is thread-safe in the case of a single producer and single consumer. +// +// Virtual memory technique originally proposed by Philip Howard (http://vrb.slashusr.org/), and +// adapted to Darwin by Kurt Revis (http://www.snoize.com, +// http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz) +// +// +// Copyright (C) 2012-2013 A Tasty Pixel +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// +// 3. This notice may not be removed or altered from any source distribution. 
+// + +#ifndef TPCircularBuffer_h +#define TPCircularBuffer_h + +#include +#include +#include + +#ifdef __cplusplus + extern "C++" { + #include + typedef std::atomic_int atomicInt; + #define atomicFetchAdd(a,b) std::atomic_fetch_add(a,b) + } +#else + #include + typedef atomic_int atomicInt; + #define atomicFetchAdd(a,b) atomic_fetch_add(a,b) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct { + void *buffer; + uint32_t length; + uint32_t tail; + uint32_t head; + volatile atomicInt fillCount; + bool atomic; +} TPCircularBuffer; + +/*! + * Initialise buffer + * + * Note that the length is advisory only: Because of the way the + * memory mirroring technique works, the true buffer length will + * be multiples of the device page size (e.g. 4096 bytes) + * + * If you intend to use the AudioBufferList utilities, you should + * always allocate a bit more space than you need for pure audio + * data, so there's room for the metadata. How much extra is required + * depends on how many AudioBufferList structures are used, which is + * a function of how many audio frames each buffer holds. A good rule + * of thumb is to add 15%, or at least another 2048 bytes or so. + * + * @param buffer Circular buffer + * @param length Length of buffer + */ +#define TPCircularBufferInit(buffer, length) \ + _TPCircularBufferInit(buffer, length, sizeof(*buffer)) +bool _TPCircularBufferInit(TPCircularBuffer *buffer, uint32_t length, size_t structSize); + +/*! + * Cleanup buffer + * + * Releases buffer resources. + */ +void TPCircularBufferCleanup(TPCircularBuffer *buffer); + +/*! + * Clear buffer + * + * Resets buffer to original, empty state. + * + * This is safe for use by consumer while producer is accessing + * buffer. + */ +void TPCircularBufferClear(TPCircularBuffer *buffer); + +/*! + * Set the atomicity + * + * If you set the atomiticy to false using this method, the buffer will + * not use atomic operations. 
This can be used to give the compiler a little + * more optimisation opportunities when the buffer is only used on one thread. + * + * Important note: Only set this to false if you know what you're doing! + * + * The default value is true (the buffer will use atomic operations) + * + * @param buffer Circular buffer + * @param atomic Whether the buffer is atomic (default true) + */ +void TPCircularBufferSetAtomic(TPCircularBuffer *buffer, bool atomic); + +// Reading (consuming) + +/*! + * Access end of buffer + * + * This gives you a pointer to the end of the buffer, ready + * for reading, and the number of available bytes to read. + * + * @param buffer Circular buffer + * @param availableBytes On output, the number of bytes ready for reading + * @return Pointer to the first bytes ready for reading, or NULL if buffer is empty + */ +static __inline__ __attribute__((always_inline)) void* TPCircularBufferTail(TPCircularBuffer *buffer, uint32_t* availableBytes) { + *availableBytes = buffer->fillCount; + if ( *availableBytes == 0 ) return NULL; + return (void*)((char*)buffer->buffer + buffer->tail); +} + +/*! + * Consume bytes in buffer + * + * This frees up the just-read bytes, ready for writing again. + * + * @param buffer Circular buffer + * @param amount Number of bytes to consume + */ +static __inline__ __attribute__((always_inline)) void TPCircularBufferConsume(TPCircularBuffer *buffer, uint32_t amount) { + buffer->tail = (buffer->tail + amount) % buffer->length; + if ( buffer->atomic ) { + atomicFetchAdd(&buffer->fillCount, -amount); + } else { + buffer->fillCount -= amount; + } + assert(buffer->fillCount >= 0); +} + +/*! + * Access front of buffer + * + * This gives you a pointer to the front of the buffer, ready + * for writing, and the number of available bytes to write. 
+ * + * @param buffer Circular buffer + * @param availableBytes On output, the number of bytes ready for writing + * @return Pointer to the first bytes ready for writing, or NULL if buffer is full + */ +static __inline__ __attribute__((always_inline)) void* TPCircularBufferHead(TPCircularBuffer *buffer, uint32_t* availableBytes) { + *availableBytes = (buffer->length - buffer->fillCount); + if ( *availableBytes == 0 ) return NULL; + return (void*)((char*)buffer->buffer + buffer->head); +} + +// Writing (producing) + +/*! + * Produce bytes in buffer + * + * This marks the given section of the buffer ready for reading. + * + * @param buffer Circular buffer + * @param amount Number of bytes to produce + */ +static __inline__ __attribute__((always_inline)) void TPCircularBufferProduce(TPCircularBuffer *buffer, uint32_t amount) { + buffer->head = (buffer->head + amount) % buffer->length; + if ( buffer->atomic ) { + atomicFetchAdd(&buffer->fillCount, amount); + } else { + buffer->fillCount += amount; + } + assert(buffer->fillCount <= buffer->length); +} + +/*! + * Helper routine to copy bytes to buffer + * + * This copies the given bytes to the buffer, and marks them ready for reading. + * + * @param buffer Circular buffer + * @param src Source buffer + * @param len Number of bytes in source buffer + * @return true if bytes copied, false if there was insufficient space + */ +static __inline__ __attribute__((always_inline)) bool TPCircularBufferProduceBytes(TPCircularBuffer *buffer, const void* src, uint32_t len) { + uint32_t space; + void *ptr = TPCircularBufferHead(buffer, &space); + if ( space < len ) return false; + memcpy(ptr, src, len); + TPCircularBufferProduce(buffer, len); + return true; +} + +/*! 
+ * Deprecated method + */ +static __inline__ __attribute__((always_inline)) __deprecated_msg("use TPCircularBufferSetAtomic(false) and TPCircularBufferConsume instead") +void TPCircularBufferConsumeNoBarrier(TPCircularBuffer *buffer, uint32_t amount) { + buffer->tail = (buffer->tail + amount) % buffer->length; + buffer->fillCount -= amount; + assert(buffer->fillCount >= 0); +} + +/*! + * Deprecated method + */ +static __inline__ __attribute__((always_inline)) __deprecated_msg("use TPCircularBufferSetAtomic(false) and TPCircularBufferProduce instead") +void TPCircularBufferProduceNoBarrier(TPCircularBuffer *buffer, uint32_t amount) { + buffer->head = (buffer->head + amount) % buffer->length; + buffer->fillCount += amount; + assert(buffer->fillCount <= buffer->length); +} + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/framework/TPCircularBuffer.m b/framework/TPCircularBuffer.m new file mode 100755 index 00000000..a3e6b3c5 --- /dev/null +++ b/framework/TPCircularBuffer.m @@ -0,0 +1,149 @@ +// +// TPCircularBuffer.c +// Circular/Ring buffer implementation +// +// https://github.com/michaeltyson/TPCircularBuffer +// +// Created by Michael Tyson on 10/12/2011. +// +// Copyright (C) 2012-2013 A Tasty Pixel +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// +// 2. 
Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// +// 3. This notice may not be removed or altered from any source distribution. +// + +#include "TPCircularBuffer.h" +#include +#include +#include + +#define reportResult(result,operation) (_reportResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__)) +static inline bool _reportResult(kern_return_t result, const char *operation, const char* file, int line) { + if ( result != ERR_SUCCESS ) { + printf("%s:%d: %s: %s\n", file, line, operation, mach_error_string(result)); + return false; + } + return true; +} + +bool _TPCircularBufferInit(TPCircularBuffer *buffer, uint32_t length, size_t structSize) { + + assert(length > 0); + + if ( structSize != sizeof(TPCircularBuffer) ) { + fprintf(stderr, "TPCircularBuffer: Header version mismatch. Check for old versions of TPCircularBuffer in your project\n"); + abort(); + } + + // Keep trying until we get our buffer, needed to handle race conditions + int retries = 3; + while ( true ) { + + buffer->length = (uint32_t)round_page(length); // We need whole page sizes + + // Temporarily allocate twice the length, so we have the contiguous address space to + // support a second instance of the buffer directly after + vm_address_t bufferAddress; + kern_return_t result = vm_allocate(mach_task_self(), + &bufferAddress, + buffer->length * 2, + VM_FLAGS_ANYWHERE); // allocate anywhere it'll fit + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Buffer allocation"); + return false; + } + // Try again if we fail + continue; + } + + // Now replace the second half of the allocation with a virtual copy of the first half. Deallocate the second half... 
+ result = vm_deallocate(mach_task_self(), + bufferAddress + buffer->length, + buffer->length); + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Buffer deallocation"); + return false; + } + // If this fails somehow, deallocate the whole region and try again + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + // Re-map the buffer to the address space immediately after the buffer + vm_address_t virtualAddress = bufferAddress + buffer->length; + vm_prot_t cur_prot, max_prot; + result = vm_remap(mach_task_self(), + &virtualAddress, // mirror target + buffer->length, // size of mirror + 0, // auto alignment + 0, // force remapping to virtualAddress + mach_task_self(), // same task + bufferAddress, // mirror source + 0, // MAP READ-WRITE, NOT COPY + &cur_prot, // unused protection struct + &max_prot, // unused protection struct + VM_INHERIT_DEFAULT); + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Remap buffer memory"); + return false; + } + // If this remap failed, we hit a race condition, so deallocate and try again + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + if ( virtualAddress != bufferAddress+buffer->length ) { + // If the memory is not contiguous, clean up both allocated buffers and try again + if ( retries-- == 0 ) { + printf("Couldn't map buffer memory to end of buffer\n"); + return false; + } + + vm_deallocate(mach_task_self(), virtualAddress, buffer->length); + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + buffer->buffer = (void*)bufferAddress; + buffer->fillCount = 0; + buffer->head = buffer->tail = 0; + buffer->atomic = true; + + return true; + } + return false; +} + +void TPCircularBufferCleanup(TPCircularBuffer *buffer) { + vm_deallocate(mach_task_self(), (vm_address_t)buffer->buffer, buffer->length * 2); + memset(buffer, 0, sizeof(TPCircularBuffer)); +} + +void 
TPCircularBufferClear(TPCircularBuffer *buffer) { + uint32_t fillCount; + if ( TPCircularBufferTail(buffer, &fillCount) ) { + TPCircularBufferConsume(buffer, fillCount); + } +} + +void TPCircularBufferSetAtomic(TPCircularBuffer *buffer, bool atomic) { + buffer->atomic = atomic; +} From 2270e008cf471a4c7398f1337b946d957b4ce8be Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 1 Mar 2018 19:19:57 -0800 Subject: [PATCH 046/332] Mark SpeakerOutput class public --- framework/Source/iOS/MovieInput.swift | 2 +- framework/Source/iOS/MovieOutput.swift | 2 +- framework/SpeakerOutput.swift | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index afbb2f27..216a23ff 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -272,7 +272,7 @@ public class MovieInput: ImageSource { // Determine how much time we need to wait in order to catch up to the current time relative to the start // We are forcing the samples to adhear to their own sample times. 
- let delay = currentSampleTimeNanoseconds - (currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) + let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index c9a47a9a..17cf7713 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -2,7 +2,7 @@ import AVFoundation extension String: Error {} -@objc public protocol AudioEncodingTarget { +public protocol AudioEncodingTarget { func activateAudioTrack() func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) // Note: This is not used for synchronized encoding. diff --git a/framework/SpeakerOutput.swift b/framework/SpeakerOutput.swift index 03f15df5..f4aa6d56 100644 --- a/framework/SpeakerOutput.swift +++ b/framework/SpeakerOutput.swift @@ -11,7 +11,7 @@ import Foundation import AudioToolbox import AVFoundation -class SpeakerOutput: AudioEncodingTarget { +public class SpeakerOutput: AudioEncodingTarget { public var changesAudioSession = true @@ -81,7 +81,7 @@ class SpeakerOutput: AudioEncodingTarget { // MARK: - // MARK: AudioEncodingTarget protocol - func activateAudioTrack() { + public func activateAudioTrack() { if(changesAudioSession) { do { try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient) @@ -168,7 +168,7 @@ class SpeakerOutput: AudioEncodingTarget { hasBuffer = false } - func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { defer { if(shouldInvalidateSampleWhenDone) { 
CMSampleBufferInvalidate(sampleBuffer) @@ -231,7 +231,7 @@ class SpeakerOutput: AudioEncodingTarget { } } - func readyForNextAudioBuffer() -> Bool { + public func readyForNextAudioBuffer() -> Bool { return isReadyForMoreMediaData } From 505dba8d2481f0c505078ab155a91c17b24eb967 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 1 Mar 2018 19:24:07 -0800 Subject: [PATCH 047/332] Resolve bridging header issues --- framework/GPUImage.xcodeproj/project.pbxproj | 34 +++++++++++--------- framework/{ => Source}/SpeakerOutput.swift | 0 framework/{ => Source}/TPCircularBuffer.h | 0 framework/{ => Source}/TPCircularBuffer.m | 0 4 files changed, 18 insertions(+), 16 deletions(-) rename framework/{ => Source}/SpeakerOutput.swift (100%) rename framework/{ => Source}/TPCircularBuffer.h (100%) rename framework/{ => Source}/TPCircularBuffer.m (100%) diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 94200a32..c1c39d33 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -11,13 +11,14 @@ 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; - 1F6D1CAB2048F79C00317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */; }; - 1F6D1CAC2048F79C00317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */; }; - 1F6D1CAD2048F79C00317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CAA2048F79C00317B5F /* 
TPCircularBuffer.m */; }; - 1F6D1CAE2048F79C00317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */; }; - 1F6D1CB12048F7BC00317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CAF2048F7BB00317B5F /* SpeakerOutput.swift */; }; 1F6D1CB32048F81D00317B5F /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */; }; 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */; }; + 1F6D1CB82048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; + 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; + 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; + 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; + 1F6D1CBD2048FB0B00317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */; }; + 1F6D1CBE2048FB0B00317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* 
Framebuffer.swift */; }; @@ -384,11 +385,11 @@ 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSObject+Exception.m"; path = "Source/NSObject+Exception.m"; sourceTree = ""; }; 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSObject+Exception.h"; path = "Source/NSObject+Exception.h"; sourceTree = ""; }; 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImage-Bridging-Header.h"; path = "Source/GPUImage-Bridging-Header.h"; sourceTree = ""; }; - 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TPCircularBuffer.h; sourceTree = ""; }; - 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TPCircularBuffer.m; sourceTree = ""; }; - 1F6D1CAF2048F7BB00317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SpeakerOutput.swift; sourceTree = ""; }; 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AudioToolbox.framework; sourceTree = DEVELOPER_DIR; }; 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; + 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name 
= TPCircularBuffer.h; path = Source/TPCircularBuffer.h; sourceTree = ""; }; + 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TPCircularBuffer.m; path = Source/TPCircularBuffer.m; sourceTree = ""; }; + 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/SpeakerOutput.swift; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -982,10 +983,10 @@ BC6E7CAD1C39A9D8006DF678 /* Other */ = { isa = PBXGroup; children = ( + 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */, + 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */, 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */, 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */, - 1F6D1CA92048F79C00317B5F /* TPCircularBuffer.h */, - 1F6D1CAA2048F79C00317B5F /* TPCircularBuffer.m */, BC4C85ED1C9F042900FD95D8 /* ConvertedShaders_GL.swift */, BC9E35531E52521F00B8604F /* ConvertedShaders_GLES.swift */, 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */, @@ -1151,7 +1152,7 @@ BC9E35201E524D2A00B8604F /* iOS */ = { isa = PBXGroup; children = ( - 1F6D1CAF2048F7BB00317B5F /* SpeakerOutput.swift */, + 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */, 
BC9E35231E524D4D00B8604F /* RenderView.swift */, BC9E35221E524D4D00B8604F /* PictureOutput.swift */, BC9E35211E524D4D00B8604F /* MovieOutput.swift */, @@ -1228,7 +1229,7 @@ buildActionMask = 2147483647; files = ( 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, - 1F6D1CAB2048F79C00317B5F /* TPCircularBuffer.h in Headers */, + 1F6D1CB82048FB0300317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1237,7 +1238,7 @@ buildActionMask = 2147483647; files = ( 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, - 1F6D1CAC2048F79C00317B5F /* TPCircularBuffer.h in Headers */, + 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1481,7 +1482,7 @@ BCFF46C01CB9556B00A0C521 /* WhiteBalance.swift in Sources */, BC7FD14E1CB0BD3900037949 /* ZoomBlur.swift in Sources */, BCFB07921CBF37A1009B2333 /* TextureInput.swift in Sources */, - 1F6D1CAD2048F79C00317B5F /* TPCircularBuffer.m in Sources */, + 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */, BC6E7CC71C39AD9E006DF678 /* ShaderProgram.swift in Sources */, BCFF46CA1CB96BD700A0C521 /* HighPassFilter.swift in Sources */, BC7FD1321CB0A57F00037949 /* HighlightsAndShadows.swift in Sources */, @@ -1489,6 +1490,7 @@ BC7FD11C1CB0795A00037949 /* NormalBlend.swift in Sources */, BC4EE15E1CB3481F00AD8A65 /* ThresholdSobelEdgeDetection.swift in Sources */, BC7FD1911CB1D2A300037949 /* ImageGenerator.swift in Sources */, + 1F6D1CBD2048FB0B00317B5F /* SpeakerOutput.swift in Sources */, 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC7FD1201CB079B200037949 /* SaturationBlend.swift in Sources */, BCA4E2491CC3EF26007B51BA /* ColourFASTFeatureDetection.swift in Sources */, @@ -1666,7 +1668,7 @@ BC9E35511E52518F00B8604F /* Timestamp.swift in Sources */, BC9E35781E5256EB00B8604F /* ColorMatrixFilter.swift in Sources */, BC9E35D11E52580400B8604F /* ScreenBlend.swift in Sources */, - 
1F6D1CAE2048F79C00317B5F /* TPCircularBuffer.m in Sources */, + 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */, BC9E356A1E5256C200B8604F /* Haze.swift in Sources */, BC9E35D31E52580A00B8604F /* SourceOverBlend.swift in Sources */, BC9E357E1E5256FE00B8604F /* Vibrance.swift in Sources */, @@ -1674,7 +1676,7 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* HighlightsAndShadows.swift in Sources */, - 1F6D1CB12048F7BC00317B5F /* SpeakerOutput.swift in Sources */, + 1F6D1CBE2048FB0B00317B5F /* SpeakerOutput.swift in Sources */, 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, diff --git a/framework/SpeakerOutput.swift b/framework/Source/SpeakerOutput.swift similarity index 100% rename from framework/SpeakerOutput.swift rename to framework/Source/SpeakerOutput.swift diff --git a/framework/TPCircularBuffer.h b/framework/Source/TPCircularBuffer.h similarity index 100% rename from framework/TPCircularBuffer.h rename to framework/Source/TPCircularBuffer.h diff --git a/framework/TPCircularBuffer.m b/framework/Source/TPCircularBuffer.m similarity index 100% rename from framework/TPCircularBuffer.m rename to framework/Source/TPCircularBuffer.m From 6577dc2bf75e94db1addb549a7c29db285437173 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 1 Mar 2018 19:45:55 -0800 Subject: [PATCH 048/332] Move SpeakerOutput to correct folder w/ public initializer --- framework/GPUImage.xcodeproj/project.pbxproj | 10 ++++------ framework/Source/{ => iOS}/SpeakerOutput.swift | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) rename framework/Source/{ => iOS}/SpeakerOutput.swift (99%) diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index c1c39d33..f8378d7c 100755 --- 
a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -17,8 +17,7 @@ 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; - 1F6D1CBD2048FB0B00317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */; }; - 1F6D1CBE2048FB0B00317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */; }; + 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -389,7 +388,7 @@ 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TPCircularBuffer.h; path = Source/TPCircularBuffer.h; sourceTree = 
""; }; 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TPCircularBuffer.m; path = Source/TPCircularBuffer.m; sourceTree = ""; }; - 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/SpeakerOutput.swift; sourceTree = ""; }; + 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/iOS/SpeakerOutput.swift; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -1152,7 +1151,7 @@ BC9E35201E524D2A00B8604F /* iOS */ = { isa = PBXGroup; children = ( - 1F6D1CBC2048FB0B00317B5F /* SpeakerOutput.swift */, + 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */, BC9E35231E524D4D00B8604F /* RenderView.swift */, BC9E35221E524D4D00B8604F /* PictureOutput.swift */, BC9E35211E524D4D00B8604F /* MovieOutput.swift */, @@ -1490,7 +1489,6 @@ BC7FD11C1CB0795A00037949 /* NormalBlend.swift in Sources */, BC4EE15E1CB3481F00AD8A65 /* ThresholdSobelEdgeDetection.swift in Sources */, BC7FD1911CB1D2A300037949 /* ImageGenerator.swift in Sources */, - 1F6D1CBD2048FB0B00317B5F /* SpeakerOutput.swift in Sources */, 
1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC7FD1201CB079B200037949 /* SaturationBlend.swift in Sources */, BCA4E2491CC3EF26007B51BA /* ColourFASTFeatureDetection.swift in Sources */, @@ -1676,7 +1674,7 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* HighlightsAndShadows.swift in Sources */, - 1F6D1CBE2048FB0B00317B5F /* SpeakerOutput.swift in Sources */, + 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */, 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, diff --git a/framework/Source/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift similarity index 99% rename from framework/Source/SpeakerOutput.swift rename to framework/Source/iOS/SpeakerOutput.swift index f4aa6d56..9f5217fb 100644 --- a/framework/Source/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -46,7 +46,7 @@ public class SpeakerOutput: AudioEncodingTarget { var rescueBufferContentsSize:UInt32 = 0 - init() { + public init() { circularBufferSize = bufferUnit * unitSize rescueBufferSize = Int(bufferUnit / 2) } From 549282e0c7e74831d0f8e94f4d00284b28889cd3 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 1 Mar 2018 20:39:09 -0800 Subject: [PATCH 049/332] Resolve issue where video playback would not catch up to its timeline --- framework/Source/iOS/MovieInput.swift | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 216a23ff..b0c48a72 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -270,8 +270,8 @@ public class MovieInput: ImageSource { if(self.actualStartTime == nil) { self.actualStartTime = currentActualTime } - // Determine 
how much time we need to wait in order to catch up to the current time relative to the start - // We are forcing the samples to adhear to their own sample times. + // Determine how much time we need to wait in order to display the frame at the right current time relative to the start + // What we are doing is forcing the samples to adhear to their own sample times. let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") @@ -279,6 +279,14 @@ public class MovieInput: ImageSource { if(delay > 0) { mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(delay))) } + else { + // This only happens if we aren't given enough processing time for playback + // but is necessary otherwise the playback will never catch up to its timeline. + // If we weren't adhearing to the sample timline and used the old timing method + // the video would still lag during an event like this. 
+ //print("Dropping frame in order to catch up") + return + } } self.currentTime = currentSampleTime From 3ac26eb11cfe5c525259eef82d32bbbdf6d0680f Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Fri, 2 Mar 2018 10:39:22 -0800 Subject: [PATCH 050/332] Improved SpeakerOuput start() and stop(), fix timing issue when using start(atTime:) in MovieInput --- framework/Source/iOS/MovieInput.swift | 45 ++++++++++++++++++-------- framework/Source/iOS/MovieOutput.swift | 12 +++---- 2 files changed, 37 insertions(+), 20 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index b0c48a72..65d5a7d4 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -19,7 +19,7 @@ public class MovieInput: ImageSource { } audioEncodingTarget.activateAudioTrack() - // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput + // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. if(synchronizedMovieOutput != nil) { self.enableSynchronizedEncoding() } } } @@ -28,10 +28,16 @@ public class MovieInput: ImageSource { let asset:AVAsset let videoComposition:AVVideoComposition? var playAtActualSpeed:Bool + + // Time in the video where it should start. var requestedStartTime:CMTime? + // Time in the video where it started. + var startTime:CMTime? + // Time according to device clock when the video started. var actualStartTime:DispatchTime? - + // Last sample time that played. private(set) public var currentTime:CMTime? + public var loop:Bool public var completion: (() -> Void)? @@ -96,7 +102,7 @@ public class MovieInput: ImageSource { // If the current thread is running and has not been cancelled, bail. 
return } - // Cancel the thread just to be safe in the event we somehow get here with the thread still running + // Cancel the thread just to be safe in the event we somehow get here with the thread still running. self.currentThread?.cancel() self.currentThread = Thread(target: self, selector: #selector(beginReading), object: nil) @@ -143,6 +149,7 @@ public class MovieInput: ImageSource { assetReader.add(readerAudioTrackOutput) } + self.startTime = requestedStartTime if let requestedStartTime = self.requestedStartTime { assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) } @@ -169,12 +176,12 @@ public class MovieInput: ImageSource { thread.qualityOfService = .userInitiated } else { - // This includes syncronized encoding since the above vars will be disabled for it + // This includes syncronized encoding since the above vars will be disabled for it. thread.qualityOfService = .default } guard let assetReader = self.createReader() else { - return // A return statement in this frame will end thread execution + return // A return statement in this frame will end thread execution. } do { @@ -218,21 +225,26 @@ public class MovieInput: ImageSource { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) } if(movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false) { - if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } + if let readerAudioTrackOutput = readerAudioTrackOutput { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) + } } } else { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) - if let readerAudioTrackOutput = readerAudioTrackOutput, self.audioEncodingTarget?.readyForNextAudioBuffer() ?? 
true { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } + if let readerAudioTrackOutput = readerAudioTrackOutput, + self.audioEncodingTarget?.readyForNextAudioBuffer() ?? true { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) + } } } assetReader.cancelReading() // Since only the main thread will cancel and create threads jump onto it to prevent - // the current thread from being cancelled in between the below if statement and creating the new thread + // the current thread from being cancelled in between the below if statement and creating the new thread. DispatchQueue.main.async { - // Start the video over so long as it wasn't cancelled + // Start the video over so long as it wasn't cancelled. if (self.loop && !thread.isCancelled) { self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) self.currentThread?.start() @@ -262,7 +274,14 @@ public class MovieInput: ImageSource { if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process frame input") } - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + + self.currentTime = currentSampleTime + + if let startTime = self.startTime { + // Make sure our samples start at kCMTimeZero if the video was started midway. + currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) + } if (self.playAtActualSpeed) { let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000) @@ -270,8 +289,8 @@ public class MovieInput: ImageSource { if(self.actualStartTime == nil) { self.actualStartTime = currentActualTime } - // Determine how much time we need to wait in order to display the frame at the right current time relative to the start - // What we are doing is forcing the samples to adhear to their own sample times. 
+ // Determine how much time we need to wait in order to display the frame at the right currentActualTime such that it will match the currentSampleTime. + // The reason we subtract the actualStartTime from the currentActualTime is so the actual time starts at zero relative to the video start. let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") @@ -289,8 +308,6 @@ public class MovieInput: ImageSource { } } - self.currentTime = currentSampleTime - sharedImageProcessingContext.runOperationSynchronously{ self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 17cf7713..2481961b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -40,7 +40,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, audioSettings:[String:Any]? = nil) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup - // Since we cannot access self before calling super, initialize here and not above + // Since we cannot access self before calling super, initialize this here and not above. let movieProcessingContext = OpenGLContext() if movieProcessingContext.supportsTextureCaches() { @@ -86,7 +86,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { // Don't do this work on the movieProcessingContext que so we don't block it. 
// If it does get blocked framebuffers will pile up and after it is no longer blocked/this work has finished - // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped + // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. DispatchQueue.global(qos: .utility).async { do { var success = false @@ -119,7 +119,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) - // This work must be done on the movieProcessingContext since we access openGL + // This work must be done on the movieProcessingContext since we access openGL. try self.movieProcessingContext.runOperationSynchronously { let bufferSize = GLSize(self.size) var cachedTextureRef:CVOpenGLESTexture? = nil @@ -157,7 +157,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if let lastFrame = self.previousFrameTime { // Resolve black frames at the end. If we only call finishWriting() the session's effective end time - // will be the latest end timestamp of the session's samples which could be either video or audio + // will be the latest end timestamp of the session's samples which could be either video or audio. self.assetWriter.endSession(atSourceTime: lastFrame) } @@ -178,11 +178,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Ignore still images and other non-video updates (do I still need this?) 
guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. guard (frameTime != self.previousFrameTime) else { return } if (self.previousFrameTime == nil) { - // This resolves black frames at the beginning. Any samples recieved before this time will be edited out + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. self.assetWriter.startSession(atSourceTime: frameTime) } From 2791f57637d2278b4f82f3117dabc816d99c4566 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Fri, 2 Mar 2018 10:40:10 -0800 Subject: [PATCH 051/332] Improved SpeakerOuput start() and stop() --- framework/Source/iOS/SpeakerOutput.swift | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index 9f5217fb..3ad8d795 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -15,7 +15,7 @@ public class SpeakerOutput: AudioEncodingTarget { public var changesAudioSession = true - var isPlaying = false + public private(set) var isPlaying = false var hasBuffer = false var isReadyForMoreMediaData = true { willSet { @@ -67,15 +67,24 @@ public class SpeakerOutput: AudioEncodingTarget { // MARK: Playback control public func start() { + if(isPlaying) { return } + AUGraphStart(processingGraph!) isPlaying = true } public func stop() { + if(!isPlaying) { return } + AUGraphStop(processingGraph!) 
isPlaying = false + + rescueBufferContentsSize = 0 + TPCircularBufferClear(&circularBuffer) + hasBuffer = false + isReadyForMoreMediaData = true } // MARK: - @@ -210,7 +219,8 @@ public class SpeakerOutput: AudioEncodingTarget { // This is actually doing audioBufferList.mBuffers[0] // Since the struct has an array of length of 1 the compiler is interpreting // it as a single item array and not letting us use the above line. - // Since the array pointer points to the first item of the c array this is equally fine. + // Since the array pointer points to the first item of the c array + // and all we want is the first item this is equally fine. let audioBuffer = audioBufferList.mBuffers // Place the AudioBufferList in the circular buffer From a0ea9acf9463725c605dcac5dd5afa9bec404dbf Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 8 Mar 2018 15:29:44 -0800 Subject: [PATCH 052/332] Improved synchronized encoding --- framework/Source/iOS/MovieInput.swift | 16 ++++--- framework/Source/iOS/MovieOutput.swift | 66 +++++++++++++++++++++----- 2 files changed, 64 insertions(+), 18 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 65d5a7d4..815af4a2 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -215,9 +215,9 @@ public class MovieInput: ImageSource { if let movieOutput = self.synchronizedMovieOutput { self.conditionLock.lock() if(self.readingShouldWait) { - if(synchronizedEncodingDebug) { print("Disable reading") } + self.synchronizedEncodingDebugPrint("Disable reading") self.conditionLock.wait() - if(synchronizedEncodingDebug) { print("Enable reading") } + self.synchronizedEncodingDebugPrint("Enable reading") } self.conditionLock.unlock() @@ -253,7 +253,7 @@ public class MovieInput: ImageSource { self.delegate?.didFinishMovie() self.completion?() - if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Synchronized encoding finished") } + 
self.synchronizedEncodingDebugPrint("MovieInput finished reading") } } } @@ -272,7 +272,7 @@ public class MovieInput: ImageSource { return } - if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process frame input") } + self.synchronizedEncodingDebugPrint("Process frame input") var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) @@ -325,7 +325,7 @@ public class MovieInput: ImageSource { return } - if(self.synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print("Process audio sample input") } + self.synchronizedEncodingDebugPrint("Process audio sample input") self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: true) } @@ -526,11 +526,15 @@ public class MovieInput: ImageSource { if ret != KERN_SUCCESS { mach_error("thread_policy_set:", ret) - fatalError("Unable to configure thread") + print("Unable to configure thread") } } func nanosToAbs(_ nanos: UInt64) -> UInt64 { return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer) } + + func synchronizedEncodingDebugPrint(_ string: String) { + if(synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print(string) } + } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 2481961b..5bc90b75 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -53,7 +53,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriter = try AVAssetWriter(url:URL, fileType:fileType) // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last 1/4 second should be lost in that case. 
- assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(0.25, 1000) + assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, 1000) var localSettings:[String:Any] if let videoSettings = videoSettings { @@ -150,6 +150,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } + // Wait for any remaining framebuffers/audio samples to process before finishing + sharedImageProcessingContext.runOperationSynchronously { + + } + self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true @@ -164,16 +169,20 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriter.finishWriting { completionCallback?() } + self.synchronizedEncodingDebugPrint("MovieOutput finished writing") } } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); - movieProcessingContext.runOperationAsynchronously { + let work = { guard self.isRecording, self.assetWriter.status == .writing, - !self.videoEncodingIsFinished else { return } + !self.videoEncodingIsFinished else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } // Ignore still images and other non-video updates (do I still need this?) guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } @@ -194,7 +203,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } while(!self.assetWriterVideoInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.videoEncodingIsFinished) { - if(synchronizedEncodingDebug) { print("Video waiting...") } + self.synchronizedEncodingDebugPrint("Video waiting...") // Better to poll isReadyForMoreMediaData often since when it does become true // we don't want to risk letting framebuffers pile up in between poll intervals. 
usleep(100000) // 0.1 seconds @@ -207,18 +216,40 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - if(synchronizedEncodingDebug && !self.encodingLiveVideo) { print("Process frame output") } + self.synchronizedEncodingDebugPrint("Process frame output") - if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { - debugPrint("Problem appending pixel buffer at time: \(frameTime)") + do { + try NSObject.catchException { + if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { + debugPrint("Problem appending pixel buffer at time: \(frameTime)") + } + } + } + catch { + print("Trouble appending audio sample buffer: \(error)") } CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) if !self.movieProcessingContext.supportsTextureCaches() { self.pixelBuffer = nil } + + sharedImageProcessingContext.runOperationAsynchronously { + framebuffer.unlock() + } + } - framebuffer.unlock() + if(self.encodingLiveVideo) { + // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que + // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur + // since the framebuffers come in much slower than during synchronized encoding. + movieProcessingContext.runOperationAsynchronously(work) + } + else { + // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. + // If we don't force the sharedImageProcessingContext que to wait for this frame to finish processing it will + // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext que would run when the system wants it to. 
+ movieProcessingContext.runOperationSynchronously(work) } } @@ -271,14 +302,21 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } while(!assetWriterAudioInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.audioEncodingIsFinished) { - if(synchronizedEncodingDebug) { print("Audio waiting...") } + self.synchronizedEncodingDebugPrint("Audio waiting...") usleep(100000) } - if(synchronizedEncodingDebug && !self.encodingLiveVideo) { print("Process audio sample output") } + self.synchronizedEncodingDebugPrint("Process audio sample output") - if (!assetWriterAudioInput.append(sampleBuffer)) { - print("Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + do { + try NSObject.catchException { + if (!assetWriterAudioInput.append(sampleBuffer)) { + print("Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + } + } + } + catch { + print("Trouble appending audio sample buffer: \(error)") } } @@ -294,6 +332,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func readyForNextAudioBuffer() -> Bool { return true } + + func synchronizedEncodingDebugPrint(_ string: String) { + if(synchronizedEncodingDebug && !encodingLiveVideo) { print(string) } + } } From effc76d803ce7dc2b5d6e56e312f8ee5de29f758 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 11 Mar 2018 01:57:24 -0800 Subject: [PATCH 053/332] Add throw support to PictureInput --- .../Source/Operations/AmatorkaFilter.swift | 2 +- .../Source/Operations/MissEtikateFilter.swift | 2 +- .../Source/Operations/SoftElegance.swift | 4 +- framework/Source/iOS/MovieOutput.swift | 4 +- framework/Source/iOS/PictureInput.swift | 30 ++++--- framework/Source/iOS/PictureOutput.swift | 8 +- framework/Source/iOS/RenderView.swift | 78 +++++++++---------- framework/Source/iOS/SpeakerOutput.swift | 4 +- 8 files changed, 65 insertions(+), 67 deletions(-) diff --git a/framework/Source/Operations/AmatorkaFilter.swift 
b/framework/Source/Operations/AmatorkaFilter.swift index fc569eaf..cf645454 100755 --- a/framework/Source/Operations/AmatorkaFilter.swift +++ b/framework/Source/Operations/AmatorkaFilter.swift @@ -11,7 +11,7 @@ public class AmatorkaFilter: LookupFilter { public override init() { super.init() - ({lookupImage = PictureInput(imageName:"lookup_amatorka.png")})() + ({lookupImage = try? PictureInput(imageName:"lookup_amatorka.png")})() ({intensity = 1.0})() } } diff --git a/framework/Source/Operations/MissEtikateFilter.swift b/framework/Source/Operations/MissEtikateFilter.swift index 16d60168..05a2d260 100755 --- a/framework/Source/Operations/MissEtikateFilter.swift +++ b/framework/Source/Operations/MissEtikateFilter.swift @@ -10,7 +10,7 @@ public class MissEtikateFilter: LookupFilter { public override init() { super.init() - ({lookupImage = PictureInput(imageName:"lookup_miss_etikate.png")})() + ({lookupImage = try? PictureInput(imageName:"lookup_miss_etikate.png")})() } } #endif diff --git a/framework/Source/Operations/SoftElegance.swift b/framework/Source/Operations/SoftElegance.swift index 50e85ef3..fa5d4f1c 100755 --- a/framework/Source/Operations/SoftElegance.swift +++ b/framework/Source/Operations/SoftElegance.swift @@ -9,8 +9,8 @@ public class SoftElegance: OperationGroup { super.init() self.configureGroup{input, output in - self.lookup1.lookupImage = PictureInput(imageName:"lookup_soft_elegance_1.png") - self.lookup2.lookupImage = PictureInput(imageName:"lookup_soft_elegance_2.png") + self.lookup1.lookupImage = try? PictureInput(imageName:"lookup_soft_elegance_1.png") + self.lookup2.lookupImage = try? 
PictureInput(imageName:"lookup_soft_elegance_2.png") self.gaussianBlur.blurRadiusInPixels = 10.0 self.alphaBlend.mix = 0.14 diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 5bc90b75..7e5f8052 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,6 +1,8 @@ import AVFoundation -extension String: Error {} +extension String: LocalizedError { + public var errorDescription: String? { return self } +} public protocol AudioEncodingTarget { func activateAudioTrack() diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index fac0f0be..7bb513fd 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -6,12 +6,12 @@ public class PictureInput: ImageSource { var imageFramebuffer:Framebuffer? var hasProcessedImage:Bool = false - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { + public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { let widthOfImage = GLint(image.width) let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. 
- guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } + guard((widthOfImage > 0) && (heightOfImage > 0)) else { throw "Tried to pass in a zero-sized image" } var widthToUseForTexture = widthOfImage var heightToUseForTexture = heightOfImage @@ -72,7 +72,7 @@ public class PictureInput: ImageSource { } } - sharedImageProcessingContext.runOperationSynchronously{ + try sharedImageProcessingContext.runOperationSynchronously{ // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); if (shouldRedrawUsingCoreGraphics) { @@ -86,19 +86,15 @@ public class PictureInput: ImageSource { imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly - guard let data = image.dataProvider?.data else { return } + guard let data = image.dataProvider?.data else { throw "Unable to retrieve image dataProvider" } dataFromImageDataProvider = data imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) } - do { - // TODO: Alter orientation based on metadata from photo - self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) - self.imageFramebuffer!.lock() - } catch { - print("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") - return - } + // TODO: Alter orientation based on metadata from photo + self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) + self.imageFramebuffer!.lock() + glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer!.texture) if (smoothlyScaleOutput) { @@ -119,13 +115,13 @@ public class PictureInput: ImageSource { } 
- public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { + try self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - guard let image = UIImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } - self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { + guard let image = UIImage(named:imageName) else { throw "No such image named: \(imageName) in your application bundle" } + try self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } deinit { diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 6e434bf6..1db2110b 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -114,14 +114,14 @@ public extension ImageSource { } public extension UIImage { - public func filterWithOperation(_ operation:T) -> UIImage { - return filterWithPipeline{input, output in + public func filterWithOperation(_ operation:T) throws -> UIImage { + return try filterWithPipeline{input, output in input --> operation --> output } } - public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> UIImage { - let picture = PictureInput(image:self) + public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) throws -> UIImage { + let picture = try 
PictureInput(image:self) var outputImage:UIImage? let pictureOutput = PictureOutput() pictureOutput.onlyCaptureNextFrame = true diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 96511d6a..68f032b8 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -10,14 +10,14 @@ public protocol RenderViewDelegate: class { public class RenderView:UIView, ImageConsumer { public weak var delegate:RenderViewDelegate? + public var shouldPresentWithTransaction = false + public var waitsForTransaction = true + public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio public var orientation:ImageOrientation = .portrait public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} - public var shouldPresentWithTransaction = false - public var waitsForTransaction = true - public let sources = SourceContainer() public let maximumInputs:UInt = 1 var displayFramebuffer:GLuint? 
@@ -51,7 +51,9 @@ public class RenderView:UIView, ImageConsumer { // Check if the size changed if(oldValue.size != self.bounds.size) { // Destroy the displayFramebuffer so we render at the correct size for the next frame - self.destroyDisplayFramebuffer() + sharedImageProcessingContext.runOperationAsynchronously{ + self.destroyDisplayFramebuffer() + } } } } @@ -67,7 +69,9 @@ public class RenderView:UIView, ImageConsumer { } deinit { - destroyDisplayFramebuffer() + sharedImageProcessingContext.runOperationSynchronously{ + destroyDisplayFramebuffer() + } } var waitingForTransaction = false @@ -86,7 +90,6 @@ public class RenderView:UIView, ImageConsumer { } func createDisplayFramebuffer() -> Bool { - sharedImageProcessingContext.makeCurrentContext() var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer @@ -100,14 +103,7 @@ public class RenderView:UIView, ImageConsumer { // Without the flush I occasionally get a warning from UIKit on the camera renderView and // when the warning comes in the renderView just stays black. This happens rarely but often enough to be a problem. // I tried a transaction and it doesn't silence it and this is likely why --> http://danielkbx.com/post/108060601989/catransaction-flush - // This flush defeats the purpose of presentWithTransaction() so it should only be enabled when you need it. - // The idea with presentWithTransaction() is to be able to change the bounds of this renderView, then draw contents into it - // at the correct bounds without any blips in between. If you have this flush() in place it will force a layout pass in the middle of that - // causing the old contents to be briefly distorted while the new contents are yet to be drawn. - // That is why this shouldn't be used in media playback scenarios. 
- if(!shouldPresentWithTransaction) { - CATransaction.flush() - } + CATransaction.flush() sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) var backingWidth:GLint = 0 @@ -152,18 +148,15 @@ public class RenderView:UIView, ImageConsumer { } func destroyDisplayFramebuffer() { - sharedImageProcessingContext.runOperationSynchronously{ - if let displayFramebuffer = self.displayFramebuffer { - var temporaryFramebuffer = displayFramebuffer - glDeleteFramebuffers(1, &temporaryFramebuffer) - self.displayFramebuffer = nil - } - - if let displayRenderbuffer = self.displayRenderbuffer { - var temporaryRenderbuffer = displayRenderbuffer - glDeleteRenderbuffers(1, &temporaryRenderbuffer) - self.displayRenderbuffer = nil - } + if let displayFramebuffer = self.displayFramebuffer { + var temporaryFramebuffer = displayFramebuffer + glDeleteFramebuffers(1, &temporaryFramebuffer) + self.displayFramebuffer = nil + } + if let displayRenderbuffer = self.displayRenderbuffer { + var temporaryRenderbuffer = displayRenderbuffer + glDeleteRenderbuffers(1, &temporaryRenderbuffer) + self.displayRenderbuffer = nil } } @@ -173,16 +166,20 @@ public class RenderView:UIView, ImageConsumer { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - let processFramebuffer = { + let work: () -> Void = { // Don't bog down UIKIt with a bunch of framebuffers if we are waiting for a transaction to complete // otherwise we will block the main thread as it trys to catch up. 
- if (self.waitingForTransaction && self.waitsForTransaction) { return } + if (self.waitingForTransaction && self.waitsForTransaction) { + framebuffer.unlock() + return + } self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - sharedImageProcessingContext.runOperationSynchronously { + sharedImageProcessingContext.runOperationAsynchronously { if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { // Bail if we couldn't successfully create the displayFramebuffer + framebuffer.unlock() return } self.activateDisplayFramebuffer() @@ -195,12 +192,17 @@ public class RenderView:UIView, ImageConsumer { glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) sharedImageProcessingContext.presentBufferForDisplay() - } - - self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - - sharedImageProcessingContext.runOperationSynchronously { - framebuffer.unlock() + + if(self.delegate?.shouldDisplayNextFramebufferOnMainThread() ?? 
false) { + DispatchQueue.main.async { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } + } + else { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } } } @@ -209,12 +211,10 @@ public class RenderView:UIView, ImageConsumer { // If you are curious, change this to sync, then try trimming/scrubbing a video // Before that happens you will get a deadlock when someone calls runOperationSynchronously since the main thread is blocked // There is a way to get around this but then the first thing mentioned will happen - DispatchQueue.main.async { - processFramebuffer() - } + DispatchQueue.main.async(execute: work) } else { - processFramebuffer() + work() } } } diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index 3ad8d795..62061d2b 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -67,7 +67,7 @@ public class SpeakerOutput: AudioEncodingTarget { // MARK: Playback control public func start() { - if(isPlaying) { return } + if(isPlaying || processingGraph == nil) { return } AUGraphStart(processingGraph!) @@ -75,7 +75,7 @@ public class SpeakerOutput: AudioEncodingTarget { } public func stop() { - if(!isPlaying) { return } + if(!isPlaying || processingGraph == nil) { return } AUGraphStop(processingGraph!) 
From c881258e278c8462fe8c526a6f968288f5d77add Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 12 Mar 2018 21:19:06 -0700 Subject: [PATCH 054/332] Add progress block support to check video encoding progress --- framework/Source/iOS/MovieInput.swift | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 815af4a2..e659c974 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -40,7 +40,11 @@ public class MovieInput: ImageSource { public var loop:Bool + // Called after the video finishes. Not called when cancel() or pause() is called. public var completion: (() -> Void)? + // Progress block of the video with a paramater value of 0-1. + // Can be used to check video encoding progress. Not called from main thread. + public var progress: ((Double) -> Void)? public var synchronizedMovieOutput:MovieOutput? { didSet { @@ -149,7 +153,7 @@ public class MovieInput: ImageSource { assetReader.add(readerAudioTrackOutput) } - self.startTime = requestedStartTime + self.startTime = self.requestedStartTime if let requestedStartTime = self.requestedStartTime { assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) } @@ -275,12 +279,14 @@ public class MovieInput: ImageSource { self.synchronizedEncodingDebugPrint("Process frame input") var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical self.currentTime = currentSampleTime if let startTime = self.startTime { // Make sure our samples start at kCMTimeZero if the video was started midway. 
currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) + duration = CMTimeSubtract(duration, startTime) } if (self.playAtActualSpeed) { @@ -308,6 +314,8 @@ public class MovieInput: ImageSource { } } + self.progress?(currentSampleTime.seconds/duration.seconds) + sharedImageProcessingContext.runOperationSynchronously{ self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) From 392c2bcf5da06e27715e1207585666b27698c6a1 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 12 Mar 2018 21:45:15 -0700 Subject: [PATCH 055/332] Resolve synchronized encoding deadlock --- framework/Source/iOS/MovieOutput.swift | 5 ----- 1 file changed, 5 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 7e5f8052..65fb3a52 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -152,11 +152,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } - // Wait for any remaining framebuffers/audio samples to process before finishing - sharedImageProcessingContext.runOperationSynchronously { - - } - self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true From 443e2bc6c1adb2ff04e0cb70e061a9020f24439d Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 15 Mar 2018 22:54:00 -0700 Subject: [PATCH 056/332] Fix playback studdering issue on non iPhone X devices Remove present with transaction funcionality --- framework/Source/iOS/RenderView.swift | 98 ++++++++++----------------- 1 file changed, 34 insertions(+), 64 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 68f032b8..c550c74e 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -3,16 +3,17 @@ import UIKit public protocol RenderViewDelegate: class { func willDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) func didDisplayFramebuffer(renderView: 
RenderView, framebuffer: Framebuffer) - func shouldDisplayNextFramebufferOnMainThread() -> Bool + // Only use this if you need to do layout in willDisplayFramebuffer before the framebuffer actually gets displayed + // Typically should only be used for one frame otherwise will cause serious playback issues + // When true the above delegate methods will be called from the main thread instead of the sharedImageProcessing que + // Default is false + func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool } // TODO: Add support for transparency public class RenderView:UIView, ImageConsumer { public weak var delegate:RenderViewDelegate? - public var shouldPresentWithTransaction = false - public var waitsForTransaction = true - public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio public var orientation:ImageOrientation = .portrait @@ -64,6 +65,7 @@ public class RenderView:UIView, ImageConsumer { let eaglLayer = self.layer as! CAEAGLLayer eaglLayer.isOpaque = true eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value:false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] + eaglLayer.contentsGravity = kCAGravityResizeAspectFill // Just for safety to prevent distortion self.internalLayer = eaglLayer } @@ -74,21 +76,6 @@ public class RenderView:UIView, ImageConsumer { } } - var waitingForTransaction = false - func presentWithTransaction() { - if #available(iOS 9.0, *) { - self.internalLayer.presentsWithTransaction = true - self.waitingForTransaction = true - - CATransaction.begin() - CATransaction.setCompletionBlock({ - self.internalLayer.presentsWithTransaction = false - self.waitingForTransaction = false - }) - CATransaction.commit() - } - } - func createDisplayFramebuffer() -> Bool { var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) @@ -103,6 +90,9 @@ public class RenderView:UIView, ImageConsumer { // Without the flush I occasionally get a warning 
from UIKit on the camera renderView and // when the warning comes in the renderView just stays black. This happens rarely but often enough to be a problem. // I tried a transaction and it doesn't silence it and this is likely why --> http://danielkbx.com/post/108060601989/catransaction-flush + // This is also very important because it guarantees the view is layed out at the correct size before it is drawn to. + // Its possible the size of the view was changed right before this was called which would result in us drawing to the view at the old size + // and then the view size would change to the new size at the next layout pass and distort our already drawn image. CATransaction.flush() sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) @@ -129,21 +119,6 @@ public class RenderView:UIView, ImageConsumer { return false } - // Prevent the first frame from prematurely drawing before the view is drawn to the screen at the right size - // Aka we want to briefly synchronize UIKit with OpenGL. OpenGL draws immediately but UIKit draws in cycles. - // Note: We have to wait for the transaction to finish (aka for the drawing cycle to finish) before we disable this - // we can't just disable presentsWithTransaction after the first frame because it may even take a couple frames for - // a UIKit drawing cycle to complete (rarely but sometimes) - // Without this you will get weird content flashes when switching between videos of different size - // since the content will be drawn into a view that which although has the right frame/bounds it is not - // yet actually reflected on the screen. OpenGL would just draw right into the wrongly displayed view - // as soon as presentBufferForDisplay() is called. 
- // Source --> https://stackoverflow.com/a/30722276/1275014 - // Source --> https://developer.apple.com/documentation/quartzcore/caeagllayer/1618676-presentswithtransaction - if(shouldPresentWithTransaction) { - self.presentWithTransaction() - } - return true } @@ -167,51 +142,46 @@ public class RenderView:UIView, ImageConsumer { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { let work: () -> Void = { - // Don't bog down UIKIt with a bunch of framebuffers if we are waiting for a transaction to complete - // otherwise we will block the main thread as it trys to catch up. - if (self.waitingForTransaction && self.waitsForTransaction) { + if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { + // Bail if we couldn't successfully create the displayFramebuffer framebuffer.unlock() return } + self.activateDisplayFramebuffer() - self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + clearFramebufferWithColor(self.backgroundRenderColor) - sharedImageProcessingContext.runOperationAsynchronously { - if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { - // Bail if we couldn't successfully create the displayFramebuffer - framebuffer.unlock() - return - } - self.activateDisplayFramebuffer() - - clearFramebufferWithColor(self.backgroundRenderColor) - - let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:self.backingSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) - - glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) - - sharedImageProcessingContext.presentBufferForDisplay() - - if(self.delegate?.shouldDisplayNextFramebufferOnMainThread() ?? 
false) { - DispatchQueue.main.async { - self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - framebuffer.unlock() - } - } - else { + let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:self.backingSize) + renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) + + glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) + + sharedImageProcessingContext.presentBufferForDisplay() + + if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false) { + DispatchQueue.main.async { self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) framebuffer.unlock() } } + else { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } } - if(self.delegate?.shouldDisplayNextFramebufferOnMainThread() ?? false) { + if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? 
false) { // CAUTION: Never call sync from the sharedImageProcessingContext, it will cause cyclic thread deadlocks // If you are curious, change this to sync, then try trimming/scrubbing a video // Before that happens you will get a deadlock when someone calls runOperationSynchronously since the main thread is blocked // There is a way to get around this but then the first thing mentioned will happen - DispatchQueue.main.async(execute: work) + DispatchQueue.main.async { + self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + + sharedImageProcessingContext.runOperationAsynchronously { + work() + } + } } else { work() From c1d7afdb60166fde2ff99c168cddf917661aa5cd Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 26 Mar 2018 13:04:50 -0700 Subject: [PATCH 057/332] Resolve syncronized encoding issue for videos with no sound --- framework/Source/iOS/MovieInput.swift | 7 +++++-- framework/Source/iOS/MovieOutput.swift | 6 ++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index e659c974..6b27694f 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -14,8 +14,11 @@ public class MovieInput: ImageSource { public var audioEncodingTarget:AudioEncodingTarget? 
{ didSet { - guard let audioEncodingTarget = audioEncodingTarget else { - return + guard let audioEncodingTarget = audioEncodingTarget, + self.asset.tracks(withMediaType: AVMediaTypeAudio).count > 0 else { + // Make sure we don't activate the audio track if the asset doesn't have audio + // Otherwise the MovieOutput may wait for audio samples to complete the ideal interleaving pattern + return } audioEncodingTarget.activateAudioTrack() diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 65fb3a52..c229c693 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -132,6 +132,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.isRecording = true + self.synchronizedEncodingDebugPrint("MovieOutput started writing") + completionCallback?(true) } } catch { @@ -218,12 +220,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try NSObject.catchException { if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { - debugPrint("Problem appending pixel buffer at time: \(frameTime)") + debugPrint("Trouble appending pixel buffer at time: \(frameTime)") } } } catch { - print("Trouble appending audio sample buffer: \(error)") + print("Trouble appending pixel buffer: \(error)") } CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) From ba55e2da5fcf1fa49156381e14b91c291a77371f Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Fri, 30 Mar 2018 13:20:53 -0700 Subject: [PATCH 058/332] Resolve dropped CVPixelBuffers in MovieOutput MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://developer.apple.com/documentation/avfoundation/avassetwriterinputpixelbufferadaptor/1388102-append Documentation: "Do not modify a CVPixelBuffer or its contents after you have passed it to this method." 
It must then be a requirement that for each frame we create a new pixel buffer with CVPixelBufferPoolCreatePixelBuffer(). Since we are not doing this, occasionally old pixel buffers are being reused. This is causing dropped frames - in the form of duplicates. These would mostly occur towards the beginning of videos and some would be interspersed throughout the rest of the video. I also noticed this problem impacted certain devices more so than others, usually older devices more than newer devices. With this resolved video encoding is now a 1-1 mapping of frames and 60fps video is supported without issue. https://github.com/BradLarson/GPUImage/issues/1501 --- framework/Source/BasicOperation.swift | 3 + framework/Source/Framebuffer.swift | 3 +- framework/Source/iOS/MovieInput.swift | 28 +++-- framework/Source/iOS/MovieOutput.swift | 135 +++++++++++++----------- framework/Source/iOS/PictureInput.swift | 4 +- framework/Source/iOS/RenderView.swift | 2 + 6 files changed, 101 insertions(+), 74 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 46dfc13f..dfcd0c07 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -148,6 +148,9 @@ open class BasicOperation: ImageProcessingOperation { remainingFramebuffers[key] = framebuffer } } + + renderFramebuffer.userInfo = inputFramebuffers[0]!.userInfo + inputFramebuffers = remainingFramebuffers } diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index f0e720e9..94a86fa7 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -46,7 +46,8 @@ public enum FramebufferTimingStyle { public class Framebuffer { public var timingStyle:FramebufferTimingStyle = .stillImage public var orientation:ImageOrientation - + public var userInfo:[AnyHashable:Any]? + public let texture:GLuint let framebuffer:GLuint? let stencilBuffer:GLuint? 
diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 6b27694f..ee9369cb 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -4,8 +4,6 @@ public protocol MovieInputDelegate: class { func didFinishMovie() } -let synchronizedEncodingDebug = false - public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false @@ -14,11 +12,8 @@ public class MovieInput: ImageSource { public var audioEncodingTarget:AudioEncodingTarget? { didSet { - guard let audioEncodingTarget = audioEncodingTarget, - self.asset.tracks(withMediaType: AVMediaTypeAudio).count > 0 else { - // Make sure we don't activate the audio track if the asset doesn't have audio - // Otherwise the MovieOutput may wait for audio samples to complete the ideal interleaving pattern - return + guard let audioEncodingTarget = audioEncodingTarget else { + return } audioEncodingTarget.activateAudioTrack() @@ -54,6 +49,11 @@ public class MovieInput: ImageSource { self.enableSynchronizedEncoding() } } + public var synchronizedEncodingDebug = false { + didSet { + self.synchronizedMovieOutput?.synchronizedEncodingDebug = self.synchronizedEncodingDebug + } + } let conditionLock = NSCondition() var readingShouldWait = false var videoInputStatusObserver:NSKeyValueObservation? @@ -63,12 +63,13 @@ public class MovieInput: ImageSource { var timebaseInfo = mach_timebase_info_data_t() var currentThread:Thread? - var numberOfFramesCaptured = 0 + var totalFramesSent = 0 var totalFrameTimeDuringCapture:Double = 0.0 var audioSettings:[String:Any]? var movieFramebuffer:Framebuffer? + public var framebufferUserInfo:[AnyHashable:Any]? // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? 
= nil) throws { @@ -261,6 +262,7 @@ public class MovieInput: ImageSource { self.completion?() self.synchronizedEncodingDebugPrint("MovieInput finished reading") + self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") } } } @@ -279,6 +281,7 @@ public class MovieInput: ImageSource { return } + self.synchronizedEncodingDebugPrint("Process frame input") var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) @@ -425,15 +428,19 @@ public class MovieInput: ImageSource { CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) + movieFramebuffer.userInfo = self.framebufferUserInfo self.movieFramebuffer = movieFramebuffer self.updateTargetsWithFramebuffer(movieFramebuffer) + if(self.runBenchmark || self.synchronizedEncodingDebug) { + self.totalFramesSent += 1 + } + if self.runBenchmark { let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) - self.numberOfFramesCaptured += 1 self.totalFrameTimeDuringCapture += currentFrameTime - print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured)) ms") + print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.totalFramesSent)) ms") print("Current frame time : \(1000.0 * currentFrameTime) ms") } } @@ -455,6 +462,7 @@ public class MovieInput: ImageSource { func enableSynchronizedEncoding() { self.synchronizedMovieOutput?.encodingLiveVideo = false + self.synchronizedMovieOutput?.synchronizedEncodingDebug = self.synchronizedEncodingDebug self.playAtActualSpeed = false self.loop = false diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index c229c693..1fb91fa8 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -40,9 +40,11 @@ public class MovieOutput: ImageConsumer, 
AudioEncodingTarget { let movieProcessingContext:OpenGLContext - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, audioSettings:[String:Any]? = nil) throws { + var synchronizedEncodingDebug = false + var totalFramesAppended:Int = 0 + + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup - // Since we cannot access self before calling super, initialize this here and not above. let movieProcessingContext = OpenGLContext() if movieProcessingContext.supportsTextureCaches() { @@ -54,8 +56,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.size = size assetWriter = try AVAssetWriter(url:URL, fileType:fileType) - // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last 1/4 second should be lost in that case. - assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, 1000) var localSettings:[String:Any] if let videoSettings = videoSettings { @@ -70,6 +70,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo + + // You should provide a naturalTimeScale if you have one for the current media. + // Otherwise the asset writer will choose one for you and it may result in misaligned frames. + if let naturalTimeScale = videoNaturalTimeScale { + assetWriter.movieTimeScale = naturalTimeScale + assetWriterVideoInput.mediaTimeScale = naturalTimeScale + // This is set to make sure that a functional movie is produced, even if the recording is cut off mid-stream. 
Only the last second should be lost in that case. + assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, naturalTimeScale) + } + else { + assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, 1000) + } + encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. @@ -87,7 +100,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { // Don't do this work on the movieProcessingContext que so we don't block it. - // If it does get blocked framebuffers will pile up and after it is no longer blocked/this work has finished + // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. DispatchQueue.global(qos: .utility).async { do { @@ -101,43 +114,24 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } guard let pixelBufferPool = self.assetWriterPixelBufferInput.pixelBufferPool else { - // When the pixelBufferPool returns nil, check the following: - // https://stackoverflow.com/a/20110179/1275014 + /* + When the pixelBufferPool returns nil, check the following: + 1. the the output file of the AVAssetsWriter doesn't exist. + 2. use the pixelbuffer after calling startSessionAtTime: on the AVAssetsWriter. + 3. the settings of AVAssetWriterInput and AVAssetWriterInputPixelBufferAdaptor are correct. + 4. the present times of appendPixelBuffer uses are not the same. 
+ https://stackoverflow.com/a/20110179/1275014 + */ throw "Pixel buffer pool was nil" } + + self.isRecording = true - CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &self.pixelBuffer) - - guard let pixelBuffer = self.pixelBuffer else { - throw "Unable to create pixel buffer" - } - - /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion - * regardless of the kCVImageBufferYCbCrMatrixKey value. - * Tagging the resulting video file as BT.601, is the best option right now. - * Creating a proper BT.709 video is not possible at the moment. - */ - CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) - CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) - CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) + self.synchronizedEncodingDebugPrint("MovieOutput started writing") - // This work must be done on the movieProcessingContext since we access openGL. - try self.movieProcessingContext.runOperationSynchronously { - let bufferSize = GLSize(self.size) - var cachedTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) - let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
- - self.renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) - - self.isRecording = true - - self.synchronizedEncodingDebugPrint("MovieOutput started writing") - - completionCallback?(true) - } + completionCallback?(true) } catch { - print("Unable to start recording: \(error)") + print("MovieOutput unable to start writing: \(error)") self.assetWriter.cancelWriting() @@ -160,8 +154,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.isRecording = false if let lastFrame = self.previousFrameTime { - // Resolve black frames at the end. If we only call finishWriting() the session's effective end time - // will be the latest end timestamp of the session's samples which could be either video or audio. + // Resolve black frames at the end. Without this the end timestamp of the session's samples could be either video or audio. + // Documentation: "You do not need to call this method; if you call finishWriting without + // calling this method, the session's effective end time will be the latest end timestamp of + // the session's samples (that is, no samples will be edited out at the end)." 
self.assetWriter.endSession(atSourceTime: lastFrame) } @@ -169,6 +165,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { completionCallback?() } self.synchronizedEncodingDebugPrint("MovieOutput finished writing") + self.synchronizedEncodingDebugPrint("MovieOutput total frames appended: \(self.totalFramesAppended)") } } @@ -197,7 +194,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.previousFrameTime = frameTime guard (self.assetWriterVideoInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { - debugPrint("Had to drop a frame at time \(frameTime)") + print("Had to drop a frame at time \(frameTime)") return } @@ -208,31 +205,34 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { usleep(100000) // 0.1 seconds } - if !self.movieProcessingContext.supportsTextureCaches() { - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) - guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return } + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) + guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { + print("WARNING: Unable to create pixel buffer, dropping frame") + return } - self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - - self.synchronizedEncodingDebugPrint("Process frame output") - do { + try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + + self.synchronizedEncodingDebugPrint("Process frame output") + try NSObject.catchException { if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { - debugPrint("Trouble appending pixel buffer at time: \(frameTime)") + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(self.assetWriter.error)") } } } catch { - print("Trouble appending 
pixel buffer: \(error)") + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") } - CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !self.movieProcessingContext.supportsTextureCaches() { - self.pixelBuffer = nil + if(self.synchronizedEncodingDebug) { + self.totalFramesAppended += 1 } + CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + self.pixelBuffer = nil + sharedImageProcessingContext.runOperationAsynchronously { framebuffer.unlock() } @@ -252,12 +252,21 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { - if !movieProcessingContext.supportsTextureCaches() { - renderFramebuffer = movieProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) - renderFramebuffer.lock() + func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { + // Is this the first pixel buffer we have recieved? + if(renderFramebuffer == nil) { + CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) } + let bufferSize = GLSize(self.size) + var cachedTextureRef:CVOpenGLESTexture? = nil + let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
+ + renderFramebuffer = try? Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) @@ -267,7 +276,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) - renderFramebuffer.unlock() } } @@ -291,12 +299,15 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard self.isRecording, self.assetWriter.status == .writing, !self.audioEncodingIsFinished, - let assetWriterAudioInput = self.assetWriterAudioInput else { return } + let assetWriterAudioInput = self.assetWriterAudioInput else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping audio sample") + return + } let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) guard (assetWriterAudioInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { - debugPrint("Had to drop a audio sample at time \(currentSampleTime)") + print("Had to drop a audio sample at time \(currentSampleTime)") return } @@ -310,12 +321,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try NSObject.catchException { if (!assetWriterAudioInput.append(sampleBuffer)) { - print("Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") } } } catch { - print("Trouble appending audio sample buffer: \(error)") + print("WARNING: Trouble appending audio sample buffer: \(error)") } } @@ -327,7 +338,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - // 
Note: This is not used for synchronized encoding. + // Note: This is not used for synchronized encoding, only live video. public func readyForNextAudioBuffer() -> Bool { return true } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 7bb513fd..03e6b387 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -4,6 +4,7 @@ import UIKit public class PictureInput: ImageSource { public let targets = TargetContainer() var imageFramebuffer:Framebuffer? + public var framebufferUserInfo:[AnyHashable:Any]? var hasProcessedImage:Bool = false public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { @@ -94,7 +95,6 @@ public class PictureInput: ImageSource { // TODO: Alter orientation based on metadata from photo self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) self.imageFramebuffer!.lock() - glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer!.texture) if (smoothlyScaleOutput) { @@ -131,6 +131,8 @@ public class PictureInput: ImageSource { } public func processImage(synchronously:Bool = false) { + self.imageFramebuffer?.userInfo = self.framebufferUserInfo + if synchronously { sharedImageProcessingContext.runOperationSynchronously{ if let framebuffer = self.imageFramebuffer { diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index c550c74e..4637ac39 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -184,6 +184,8 @@ public class RenderView:UIView, ImageConsumer { } } else { + self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + work() } } From b5efefd05a887f057ce91ec793836d872f72b08a Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sat, 31 Mar 2018 15:32:47 -0700 
Subject: [PATCH 059/332] Properly conforming Swift errors --- framework/Source/Operations/ImageBuffer.swift | 2 +- framework/Source/iOS/MovieInput.swift | 2 +- framework/Source/iOS/MovieOutput.swift | 32 +++++++++++++------ framework/Source/iOS/PictureInput.swift | 27 ++++++++++++++-- 4 files changed, 49 insertions(+), 14 deletions(-) diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index 29e1100e..6b5b5675 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -10,7 +10,7 @@ public class ImageBuffer: ImageProcessingOperation { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { bufferedFramebuffers.append(framebuffer) - if (bufferedFramebuffers.count >= Int(bufferSize)) { + if (bufferedFramebuffers.count > Int(bufferSize)) { let releasedFramebuffer = bufferedFramebuffers.removeFirst() updateTargetsWithFramebuffer(releasedFramebuffer) releasedFramebuffer.unlock() diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index ee9369cb..eea259d4 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -184,7 +184,7 @@ public class MovieInput: ImageSource { thread.qualityOfService = .userInitiated } else { - // This includes syncronized encoding since the above vars will be disabled for it. + // This includes synchronized encoding since the above vars will be disabled for it. thread.qualityOfService = .default } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 1fb91fa8..5cbb2180 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,9 +1,5 @@ import AVFoundation -extension String: LocalizedError { - public var errorDescription: String? 
{ return self } -} - public protocol AudioEncodingTarget { func activateAudioTrack() func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) @@ -11,6 +7,24 @@ public protocol AudioEncodingTarget { func readyForNextAudioBuffer() -> Bool } +enum MovieOutputError: Error, CustomStringConvertible { + case startWritingError(assetWriterError: Error?) + case pixelBufferPoolNilError + + public var errorDescription: String { + switch self { + case .startWritingError(let assetWriterError): + return "Could not start asset writer: \(String(describing: assetWriterError))" + case .pixelBufferPoolNilError: + return "Asset writer pixel buffer pool was nil. Make sure that your output file doesn't already exist." + } + } + + public var description: String { + return "<\(type(of: self)): errorDescription = \(self.errorDescription)>" + } +} + public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let sources = SourceContainer() @@ -99,7 +113,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { - // Don't do this work on the movieProcessingContext que so we don't block it. + // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. 
DispatchQueue.global(qos: .utility).async { @@ -110,7 +124,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if(!success) { - throw "Could not start asset writer: \(String(describing: self.assetWriter.error))" + throw MovieOutputError.startWritingError(assetWriterError: self.assetWriter.error) } guard let pixelBufferPool = self.assetWriterPixelBufferInput.pixelBufferPool else { @@ -122,7 +136,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { 4. the present times of appendPixelBuffer uses are not the same. https://stackoverflow.com/a/20110179/1275014 */ - throw "Pixel buffer pool was nil" + throw MovieOutputError.pixelBufferPoolNilError } self.isRecording = true @@ -246,8 +260,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } else { // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. - // If we don't force the sharedImageProcessingContext que to wait for this frame to finish processing it will - // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext que would run when the system wants it to. + // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will + // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. 
movieProcessingContext.runOperationSynchronously(work) } } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 03e6b387..71db1e83 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -1,6 +1,27 @@ import OpenGLES import UIKit +public enum PictureInputError: Error, CustomStringConvertible { + case zeroSizedImageError + case dataProviderNilError + case noSuchImageError(imageName: String) + + public var errorDescription: String { + switch self { + case .zeroSizedImageError: + return "Tried to pass in a zero-sized image" + case .dataProviderNilError: + return "Unable to retrieve image dataProvider" + case .noSuchImageError(let imageName): + return "No such image named: \(imageName) in your application bundle" + } + } + + public var description: String { + return "<\(type(of: self)): errorDescription = \(self.errorDescription)>" + } +} + public class PictureInput: ImageSource { public let targets = TargetContainer() var imageFramebuffer:Framebuffer? @@ -12,7 +33,7 @@ public class PictureInput: ImageSource { let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. 
- guard((widthOfImage > 0) && (heightOfImage > 0)) else { throw "Tried to pass in a zero-sized image" } + guard((widthOfImage > 0) && (heightOfImage > 0)) else { throw PictureInputError.zeroSizedImageError } var widthToUseForTexture = widthOfImage var heightToUseForTexture = heightOfImage @@ -87,7 +108,7 @@ public class PictureInput: ImageSource { imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly - guard let data = image.dataProvider?.data else { throw "Unable to retrieve image dataProvider" } + guard let data = image.dataProvider?.data else { throw PictureInputError.dataProviderNilError } dataFromImageDataProvider = data imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) } @@ -120,7 +141,7 @@ public class PictureInput: ImageSource { } public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { - guard let image = UIImage(named:imageName) else { throw "No such image named: \(imageName) in your application bundle" } + guard let image = UIImage(named:imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } try self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } From c418e38d80019371209ecb2c3277c1e40896622c Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sat, 31 Mar 2018 15:38:36 -0700 Subject: [PATCH 060/332] Fix throw typo --- framework/Source/iOS/MovieOutput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 5cbb2180..8049f341 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -279,7 +279,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let _ = 
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) - renderFramebuffer = try? Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) From 6e0979ea8cacc5f05c127a2c85dacb9ae20df912 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 1 Apr 2018 15:16:54 -0700 Subject: [PATCH 061/332] Cleanup --- framework/Source/iOS/Camera.swift | 2 -- framework/Source/iOS/MovieOutput.swift | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 05328bbb..f5761805 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -54,8 +54,6 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioEncodingTarget:AudioEncodingTarget? 
{ didSet { guard let audioEncodingTarget = audioEncodingTarget else { - // Removing the audio inputs and outputs causes a black flash on the video output - //self.removeAudioInputsAndOutputs() return } do { diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 8049f341..87280118 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -127,7 +127,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { throw MovieOutputError.startWritingError(assetWriterError: self.assetWriter.error) } - guard let pixelBufferPool = self.assetWriterPixelBufferInput.pixelBufferPool else { + guard self.assetWriterPixelBufferInput.pixelBufferPool != nil else { /* When the pixelBufferPool returns nil, check the following: 1. the the output file of the AVAssetsWriter doesn't exist. @@ -232,7 +232,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { try NSObject.catchException { if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(self.assetWriter.error)") + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") } } } From b4eec914253421cd77bd7bb049d1c430a2d270ea Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 1 Apr 2018 16:53:46 -0700 Subject: [PATCH 062/332] Update existing iOS example projects and README --- README.md | 26 +++++--- .../FilterShowcase/FilterOperations.swift | 4 +- .../FilterShowcase.xcodeproj/project.pbxproj | 6 +- .../FilterDisplayViewController.swift | 2 +- .../project.pbxproj | 6 +- .../SimpleImageFilter/ViewController.swift | 17 ++++- .../project.pbxproj | 22 ++++++- .../Base.lproj/Main.storyboard | 65 ++++++++++++++++--- .../SimpleMovieFilter/ViewController.swift | 31 ++++++++- .../project.pbxproj | 6 +- .../project.pbxproj | 6 +- 
.../SimpleVideoRecorder/ViewController.swift | 23 ++++++- framework/Source/iOS/MovieInput.swift | 4 +- framework/Source/iOS/SpeakerOutput.swift | 4 +- 14 files changed, 177 insertions(+), 45 deletions(-) diff --git a/README.md b/README.md index 89c2d95d..3612fba2 100755 --- a/README.md +++ b/README.md @@ -152,7 +152,7 @@ There are a few different ways to approach filtering an image. The easiest are t ```swift let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() -let filteredImage = testImage.filterWithOperation(toonFilter) +let filteredImage = try! testImage.filterWithOperation(toonFilter) ``` for a more complex pipeline: @@ -161,7 +161,7 @@ for a more complex pipeline: let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() let luminanceFilter = Luminance() -let filteredImage = testImage.filterWithPipeline{input, output in +let filteredImage = try! testImage.filterWithPipeline{input, output in input --> toonFilter --> luminanceFilter --> output } ``` @@ -173,7 +173,7 @@ Both of these convenience methods wrap several operations. To feed a picture int ```swift let toonFilter = SmoothToonFilter() let testImage = UIImage(named:"WID-small.jpg")! -let pictureInput = PictureInput(image:testImage) +let pictureInput = try! PictureInput(image:testImage) let pictureOutput = PictureOutput() pictureOutput.imageAvailableCallback = {image in // Do something with image @@ -186,24 +186,34 @@ In the above, the imageAvailableCallback will be triggered right at the processI ### Filtering and re-encoding a movie ### -To filter an existing movie file, you can write code like the following: +To filter and playback an existing movie file, you can write code like the following: ```swift do { - let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + let bundleURL = Bundle.main.resourceURL! 
+ let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + + let audioDecodeSettings = [AVFormatIDKey: kAudioFormatLinearPCM] + + movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings: audioDecodeSettings) + speaker = SpeakerOutput() + movie.audioEncodingTarget = speaker + filter = SaturationAdjustment() movie --> filter --> renderView + movie.start() + speaker.start() } catch { - fatalError("Could not initialize rendering pipeline: \(error)") + print("Couldn't process movie with error: \(error)") } ``` where renderView is an instance of RenderView that you've placed somewhere in your view hierarchy. The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the screen. start() initiates the movie playback. + + ### Writing a custom image processing operation ### The framework uses a series of protocols to define types that can output images to be processed, take in an image for processing, or do both. These are the ImageSource, ImageConsumer, and ImageProcessingOperation protocols, respectively. Any type can comply to these, but typically classes are used. diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index 234cdfd7..3440e17f 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -172,7 +172,7 @@ let filterOperations: Array = [ sliderUpdateCallback: nil, filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in let castFilter = filter as! Luminance - let maskImage = PictureInput(imageName:"Mask.png") + let maskImage = try! 
PictureInput(imageName:"Mask.png") castFilter.drawUnmodifiedImageOutsideOfMask = false castFilter.mask = maskImage maskImage.processImage() @@ -739,7 +739,7 @@ let filterOperations: Array = [ let blendFilter = AlphaBlend() blendFilter.mix = 1.0 - let inputImage = PictureInput(imageName:blendImageName) + let inputImage = try! PictureInput(imageName:blendImageName) inputImage --> blendFilter camera --> castFilter --> blendFilter --> outputView diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj index 5358e00c..8f8405f8 100644 --- a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj +++ b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj @@ -157,7 +157,7 @@ isa = PBXGroup; children = ( BC9E364D1E525A3200B8604F /* GPUImage.framework */, - BC9E364F1E525A3200B8604F /* GPUImage.xctest */, + BC9E364F1E525A3200B8604F /* GPUImageTests_macOS.xctest */, BC9E36511E525A3200B8604F /* GPUImage.framework */, BC9E36531E525A3200B8604F /* GPUImageTests_iOS.xctest */, ); @@ -246,10 +246,10 @@ remoteRef = BC9E364C1E525A3200B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E364F1E525A3200B8604F /* GPUImage.xctest */ = { + BC9E364F1E525A3200B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E364E1E525A3200B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift index b98b61bb..00323443 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -45,7 +45,7 @@ class 
FilterDisplayViewController: UIViewController, UISplitViewControllerDelega currentFilterConfiguration.filter.addTarget(view) case .blend: videoCamera.addTarget(currentFilterConfiguration.filter) - self.blendImage = PictureInput(imageName:blendImageName) + self.blendImage = try? PictureInput(imageName:blendImageName) self.blendImage?.addTarget(currentFilterConfiguration.filter) self.blendImage?.processImage() currentFilterConfiguration.filter.addTarget(view) diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj index 6fb5dd86..2f0afb57 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj @@ -94,7 +94,7 @@ isa = PBXGroup; children = ( BC9E36601E525B5B00B8604F /* GPUImage.framework */, - BC9E36621E525B5B00B8604F /* GPUImage.xctest */, + BC9E36621E525B5B00B8604F /* GPUImageTests_macOS.xctest */, BC9E36641E525B5B00B8604F /* GPUImage.framework */, BC9E36661E525B5B00B8604F /* GPUImageTests_iOS.xctest */, ); @@ -219,10 +219,10 @@ remoteRef = BC9E365F1E525B5B00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E36621E525B5B00B8604F /* GPUImage.xctest */ = { + BC9E36621E525B5B00B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E36611E525B5B00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift index 7980b1dc..a2c52890 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift @@ -14,7 +14,14 @@ class ViewController: UIViewController { 
// Filtering image for saving let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() - let filteredImage = testImage.filterWithOperation(toonFilter) + + let filteredImage:UIImage + do { + filteredImage = try testImage.filterWithOperation(toonFilter) + } catch { + print("Couldn't filter image with error: \(error)") + return + } let pngImage = UIImagePNGRepresentation(filteredImage)! do { @@ -25,8 +32,14 @@ class ViewController: UIViewController { print("Couldn't write to file with error: \(error)") } + // Filtering image for display - picture = PictureInput(image:UIImage(named:"WID-small.jpg")!) + do { + picture = try PictureInput(image:UIImage(named:"WID-small.jpg")!) + } catch { + print("Couldn't create PictureInput with error: \(error)") + return + } filter = SaturationAdjustment() picture --> filter --> renderView picture.processImage() diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj index d570a4a3..287388e5 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj @@ -7,6 +7,8 @@ objects = { /* Begin PBXBuildFile section */ + 1FDF369F2071965100089948 /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1FDF369E2071965100089948 /* CoreAudio.framework */; }; + 1FDF36A12071966B00089948 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1FDF36A02071966B00089948 /* AVFoundation.framework */; }; BC9E367C1E525BCF00B8604F /* GPUImage.framework in CopyFiles */ = {isa = PBXBuildFile; fileRef = BC9E36771E525BC000B8604F /* GPUImage.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; BCC49F931CD6E1D800B63EEB /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */; }; 
BCC49F951CD6E1D800B63EEB /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC49F941CD6E1D800B63EEB /* ViewController.swift */; }; @@ -67,6 +69,8 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 1FDF369E2071965100089948 /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; + 1FDF36A02071966B00089948 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; BC9E366B1E525BC000B8604F /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../../../framework/GPUImage.xcodeproj; sourceTree = ""; }; BCC49F8F1CD6E1D800B63EEB /* SimpleMovieFilter.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SimpleMovieFilter.app; sourceTree = BUILT_PRODUCTS_DIR; }; BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; @@ -82,17 +86,28 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 1FDF36A12071966B00089948 /* AVFoundation.framework in Frameworks */, + 1FDF369F2071965100089948 /* CoreAudio.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 1FDF369D2071965000089948 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 1FDF36A02071966B00089948 /* AVFoundation.framework */, + 1FDF369E2071965100089948 /* CoreAudio.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; BC9E366C1E525BC000B8604F /* Products */ = { isa = PBXGroup; children = ( BC9E36731E525BC000B8604F /* GPUImage.framework 
*/, - BC9E36751E525BC000B8604F /* GPUImage.xctest */, + BC9E36751E525BC000B8604F /* GPUImageTests_macOS.xctest */, BC9E36771E525BC000B8604F /* GPUImage.framework */, BC9E36791E525BC000B8604F /* GPUImageTests_iOS.xctest */, ); @@ -105,6 +120,7 @@ BCC49F911CD6E1D800B63EEB /* Application */, BCC49FA41CD6E1E300B63EEB /* Resources */, BCC49F901CD6E1D800B63EEB /* Products */, + 1FDF369D2071965000089948 /* Frameworks */, ); sourceTree = ""; }; @@ -209,10 +225,10 @@ remoteRef = BC9E36721E525BC000B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E36751E525BC000B8604F /* GPUImage.xctest */ = { + BC9E36751E525BC000B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E36741E525BC000B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard index 67c04ee8..769960fe 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard @@ -1,8 +1,13 @@ - - + + + + + - + + + @@ -14,18 +19,61 @@ - + - - - + + + + + + + + + - + + + + + + + + @@ -34,6 +82,7 @@ + diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift index 173b6b8a..e1e79be2 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift @@ -1,5 +1,7 @@ import UIKit import GPUImage +import CoreAudio +import AVFoundation class ViewController: UIViewController { @@ -7,19 +9,27 @@ class ViewController: UIViewController { var movie:MovieInput! var filter:Pixellate! + var speaker:SpeakerOutput! 
- override func viewDidLayoutSubviews() { - super.viewDidLayoutSubviews() + override func viewDidLoad() { + super.viewDidLoad() let bundleURL = Bundle.main.resourceURL! let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! do { - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + let audioDecodeSettings = [AVFormatIDKey: kAudioFormatLinearPCM] + + movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings: audioDecodeSettings) + speaker = SpeakerOutput() + movie.audioEncodingTarget = speaker + filter = Pixellate() movie --> filter --> renderView movie.runBenchmark = true + movie.start() + speaker.start() } catch { print("Couldn't process movie with error: \(error)") } @@ -28,5 +38,20 @@ class ViewController: UIViewController { // let fileURL = NSURL(string:"test.png", relativeToURL:documentsDir)! // try pngImage.writeToURL(fileURL, options:.DataWritingAtomic) } + + @IBAction func pause() { + movie.pause() + speaker.cancel() + } + + @IBAction func cancel() { + movie.cancel() + speaker.cancel() + } + + @IBAction func play() { + movie.start() + speaker.start() + } } diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj index 757f18c9..1f49e1f5 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj @@ -92,7 +92,7 @@ isa = PBXGroup; children = ( BC9E36861E525C2A00B8604F /* GPUImage.framework */, - BC9E36881E525C2A00B8604F /* GPUImage.xctest */, + BC9E36881E525C2A00B8604F /* GPUImageTests_macOS.xctest */, BC9E368A1E525C2A00B8604F /* GPUImage.framework */, BC9E368C1E525C2A00B8604F /* GPUImageTests_iOS.xctest */, ); @@ -208,10 +208,10 @@ remoteRef = BC9E36851E525C2A00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E36881E525C2A00B8604F /* GPUImage.xctest */ = { + 
BC9E36881E525C2A00B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E36871E525C2A00B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj index 937e99bf..c977e0c6 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj @@ -92,7 +92,7 @@ isa = PBXGroup; children = ( BC9E36991E525C9900B8604F /* GPUImage.framework */, - BC9E369B1E525C9900B8604F /* GPUImage.xctest */, + BC9E369B1E525C9900B8604F /* GPUImageTests_macOS.xctest */, BC9E369D1E525C9900B8604F /* GPUImage.framework */, BC9E369F1E525C9900B8604F /* GPUImageTests_iOS.xctest */, ); @@ -209,10 +209,10 @@ remoteRef = BC9E36981E525C9900B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; - BC9E369B1E525C9900B8604F /* GPUImage.xctest */ = { + BC9E369B1E525C9900B8604F /* GPUImageTests_macOS.xctest */ = { isa = PBXReferenceProxy; fileType = wrapper.cfbundle; - path = GPUImage.xctest; + path = GPUImageTests_macOS.xctest; remoteRef = BC9E369A1E525C9900B8604F /* PBXContainerItemProxy */; sourceTree = BUILT_PRODUCTS_DIR; }; diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 82ed237e..bebd5be4 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -38,10 +38,29 @@ class ViewController: UIViewController { } catch { } - movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), liveVideo:true) + // Do this now so we can access the 
audioOutput recommendedAudioSettings before initializing the MovieOutput + do { + try self.camera.addAudioInputsAndOutputs() + } catch { + fatalError("ERROR: Could not connect audio target with error: \(error)") + } + + let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(withOutputFileType: AVFileTypeMPEG4) as? [String : Any] + var videoSettings: [String : Any]? = nil + if #available(iOS 11.0, *) { + videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType: .h264, assetWriterOutputFileType: AVFileTypeMPEG4) as? [String : Any] + videoSettings![AVVideoWidthKey] = nil + videoSettings![AVVideoHeightKey] = nil + } + + movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), fileType: AVFileTypeMPEG4, liveVideo: true, videoSettings: videoSettings, audioSettings: audioSettings) camera.audioEncodingTarget = movieOutput filter --> movieOutput! - movieOutput!.startRecording() + movieOutput!.startRecording() { started in + if(!started) { + self.isRecording = false + } + } DispatchQueue.main.async { // Label not updating on the main thread, for some reason, so dispatching slightly after this (sender as! UIButton).titleLabel!.text = "Stop" diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index eea259d4..4d815606 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -81,10 +81,10 @@ public class MovieInput: ImageSource { self.audioSettings = audioSettings } - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? 
= nil) throws { let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop) + try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop, audioSettings:audioSettings) } deinit { diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index 62061d2b..03e00624 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -60,7 +60,7 @@ public class SpeakerOutput: AudioEncodingTarget { } TPCircularBufferCleanup(&circularBuffer) - self.stop() + self.cancel() } // MARK: - @@ -74,7 +74,7 @@ public class SpeakerOutput: AudioEncodingTarget { isPlaying = true } - public func stop() { + public func cancel() { if(!isPlaying || processingGraph == nil) { return } AUGraphStop(processingGraph!) 
From 78632aaf919c1c602b588a33764d70b007c2cf17 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 1 Apr 2018 23:08:35 -0700 Subject: [PATCH 063/332] Add syncronized video encoding Xcode example and README example --- README.md | 88 +++- .../AppIcon.appiconset/Contents.json | 25 + .../project.pbxproj | 467 ++++++++++++++++++ .../SimpleMovieEncoding/AppDelegate.swift | 46 ++ .../AppIcon.appiconset/Contents.json | 93 ++++ .../Base.lproj/LaunchScreen.storyboard | 25 + .../Base.lproj/Main.storyboard | 44 ++ .../SimpleMovieEncoding/Info.plist | 45 ++ .../SimpleMovieEncoding/ViewController.swift | 118 +++++ .../lookup_miss_etikate.png | Bin 0 -> 202596 bytes .../project.pbxproj | 4 + .../SimpleMovieFilter/ViewController.swift | 4 +- .../SimpleVideoRecorder/ViewController.swift | 8 +- .../Source/Operations/AmatorkaFilter.swift | 7 +- .../Source/Operations/MissEtikateFilter.swift | 7 +- .../Source/Operations/SoftElegance.swift | 9 +- framework/Source/iOS/MovieOutput.swift | 16 +- 17 files changed, 986 insertions(+), 20 deletions(-) create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift create mode 100644 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png diff --git a/README.md b/README.md index 3612fba2..cdfa34f9 100755 --- a/README.md +++ b/README.md @@ -194,9 +194,9 @@ do { let bundleURL = Bundle.main.resourceURL! 
let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! - let audioDecodeSettings = [AVFormatIDKey: kAudioFormatLinearPCM] + let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM] - movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings: audioDecodeSettings) + movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings) speaker = SpeakerOutput() movie.audioEncodingTarget = speaker @@ -212,8 +212,92 @@ do { where renderView is an instance of RenderView that you've placed somewhere in your view hierarchy. The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the screen. start() initiates the movie playback. +To filter an existing movie file and save the result to a new movie file you can write code like the following: +```swift + let bundleURL = Bundle.main.resourceURL! + // The movie you want to reencode + let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + + let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! + // The location you want to save the new video + let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! 
+ + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] + let asset = AVURLAsset(url:movieURL, options:inputOptions) + + guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } + let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first + + // If you would like passthrough audio instead, use nil for both audioDecodingSettings and audioEncodingSettings + let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples + + do { + movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) + } + catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return + } + + try? FileManager().removeItem(at: exportedURL) + + let videoEncodingSettings:[String:Any] = [AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, + AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], + AVVideoCodecKey: AVVideoCodecH264] + + var acl = AudioChannelLayout() + memset(&acl, 0, MemoryLayout.size) + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo + + let audioEncodingSettings:[String:Any] = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 + ] + + do { + movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, 
audioSettings: audioEncodingSettings) + } + catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return + } + + filter = MissEtikateFilter() + + if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } + movieInput.synchronizedMovieOutput = movieOutput + //movieInput.synchronizedEncodingDebug = true + movieInput --> filter --> movieOutput + + movieInput.completion = { + self.movieOutput.finishRecording { + DispatchQueue.main.async { + print("Encoding finished") + } + } + } + + movieOutput.startRecording() { started, error in + if(!started) { + print("ERROR: MovieOutput unable to start writing: \(String(describing: error))") + return + } + self.movieInput.start() + print("Encoding started") + } +``` + + The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a lookup filter (Miss Etikate), and directs movie frames to be processed through the lookup filter on their way to the new file. + ### Writing a custom image processing operation ### The framework uses a series of protocols to define types that can output images to be processed, take in an image for processing, or do both. These are the ImageSource, ImageConsumer, and ImageProcessingOperation protocols, respectively. Any type can comply to these, but typically classes are used. 
diff --git a/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json b/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json index 1ce2f457..71356ed4 100644 --- a/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json +++ b/examples/SharedAssets/Assets-iOS.xcassets/AppIcon.appiconset/Contents.json @@ -1,5 +1,15 @@ { "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, { "size" : "29x29", "idiom" : "iphone", @@ -36,6 +46,16 @@ "filename" : "Icon-180.png", "scale" : "3x" }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, { "size" : "29x29", "idiom" : "ipad", @@ -77,6 +97,11 @@ "idiom" : "ipad", "filename" : "Icon-167.png", "scale" : "2x" + }, + { + "idiom" : "ios-marketing", + "size" : "1024x1024", + "scale" : "1x" } ], "info" : { diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj new file mode 100644 index 00000000..6aae7e00 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj @@ -0,0 +1,467 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 48; + objects = { + +/* Begin PBXBuildFile section */ + 1F2393442071C12C001886DD /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F2393432071C12C001886DD /* AppDelegate.swift */; }; + 1F2393462071C12C001886DD /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F2393452071C12C001886DD /* ViewController.swift */; }; + 1F2393492071C12C001886DD /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F2393472071C12C001886DD /* Main.storyboard */; }; + 1F23934B2071C12C001886DD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F23934A2071C12C001886DD /* Assets.xcassets */; }; + 1F23934E2071C12C001886DD /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */; }; + 1F2393662071C169001886DD /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F2393652071C169001886DD /* AVFoundation.framework */; }; + 1F2393682071C16D001886DD /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F2393672071C16D001886DD /* CoreAudio.framework */; }; + 1F23936D2071C2DB001886DD /* sample_iPod.m4v in Resources */ = {isa = PBXBuildFile; fileRef = 1F23936C2071C2DB001886DD /* sample_iPod.m4v */; }; + 1F2393772071F51C001886DD /* GPUImage.framework in CopyFiles */ = {isa = PBXBuildFile; fileRef = 1F2393612071C155001886DD /* GPUImage.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + 1F2393792071FCB1001886DD /* Assets-iOS.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */; }; + 1F23937B2071FCDB001886DD /* lookup_miss_etikate.png in Resources */ = {isa = PBXBuildFile; fileRef = 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + 1F23935C2071C155001886DD /* PBXContainerItemProxy 
*/ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC6E7CAB1C39A9D8006DF678; + remoteInfo = GPUImage_macOS; + }; + 1F23935E2071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC6E7CB51C39A9D8006DF678; + remoteInfo = GPUImageTests_macOS; + }; + 1F2393602071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC9E34E91E524A2200B8604F; + remoteInfo = GPUImage_iOS; + }; + 1F2393622071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC9E34F11E524A2200B8604F; + remoteInfo = GPUImageTests_iOS; + }; + 1F23936A2071C29D001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 1; + remoteGlobalIDString = BC9E34E81E524A2200B8604F; + remoteInfo = GPUImage_iOS; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 1F2393762071F506001886DD /* CopyFiles */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + 1F2393772071F51C001886DD /* GPUImage.framework in CopyFiles */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SimpleMovieEncoding.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 1F2393432071C12C001886DD /* AppDelegate.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 1F2393452071C12C001886DD /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + 1F2393482071C12C001886DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 1F23934A2071C12C001886DD /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 1F23934D2071C12C001886DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 1F23934F2071C12C001886DD /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 1F2393552071C155001886DD /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../../../framework/GPUImage.xcodeproj; sourceTree = ""; }; + 1F2393652071C169001886DD /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; + 1F2393672071C16D001886DD /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; + 1F23936C2071C2DB001886DD /* sample_iPod.m4v */ = {isa = PBXFileReference; lastKnownFileType = file; name = sample_iPod.m4v; path = ../../../SharedAssets/sample_iPod.m4v; sourceTree = ""; }; + 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = "Assets-iOS.xcassets"; path = "../../../SharedAssets/Assets-iOS.xcassets"; 
sourceTree = ""; }; + 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = lookup_miss_etikate.png; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 1F23933D2071C12C001886DD /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F2393682071C16D001886DD /* CoreAudio.framework in Frameworks */, + 1F2393662071C169001886DD /* AVFoundation.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 1F2393372071C12C001886DD = { + isa = PBXGroup; + children = ( + 1F2393422071C12C001886DD /* SimpleMovieEncoding */, + 1F2393412071C12C001886DD /* Products */, + 1F2393642071C169001886DD /* Frameworks */, + ); + sourceTree = ""; + }; + 1F2393412071C12C001886DD /* Products */ = { + isa = PBXGroup; + children = ( + 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */, + ); + name = Products; + sourceTree = ""; + }; + 1F2393422071C12C001886DD /* SimpleMovieEncoding */ = { + isa = PBXGroup; + children = ( + 1F2393432071C12C001886DD /* AppDelegate.swift */, + 1F2393452071C12C001886DD /* ViewController.swift */, + 1F2393472071C12C001886DD /* Main.storyboard */, + 1F2393552071C155001886DD /* GPUImage.xcodeproj */, + 1F23934A2071C12C001886DD /* Assets.xcassets */, + 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */, + 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */, + 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */, + 1F23936C2071C2DB001886DD /* sample_iPod.m4v */, + 1F23934F2071C12C001886DD /* Info.plist */, + ); + path = SimpleMovieEncoding; + sourceTree = ""; + }; + 1F2393562071C155001886DD /* Products */ = { + isa = PBXGroup; + children = ( + 1F23935D2071C155001886DD /* GPUImage.framework */, + 1F23935F2071C155001886DD /* GPUImageTests_macOS.xctest */, + 1F2393612071C155001886DD /* GPUImage.framework 
*/, + 1F2393632071C155001886DD /* GPUImageTests_iOS.xctest */, + ); + name = Products; + sourceTree = ""; + }; + 1F2393642071C169001886DD /* Frameworks */ = { + isa = PBXGroup; + children = ( + 1F2393672071C16D001886DD /* CoreAudio.framework */, + 1F2393652071C169001886DD /* AVFoundation.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 1F23933F2071C12C001886DD /* SimpleMovieEncoding */ = { + isa = PBXNativeTarget; + buildConfigurationList = 1F2393522071C12C001886DD /* Build configuration list for PBXNativeTarget "SimpleMovieEncoding" */; + buildPhases = ( + 1F23933C2071C12C001886DD /* Sources */, + 1F23933D2071C12C001886DD /* Frameworks */, + 1F23933E2071C12C001886DD /* Resources */, + 1F2393762071F506001886DD /* CopyFiles */, + ); + buildRules = ( + ); + dependencies = ( + 1F23936B2071C29D001886DD /* PBXTargetDependency */, + ); + name = SimpleMovieEncoding; + productName = SimpleMovieEncoding; + productReference = 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 1F2393382071C12C001886DD /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 0920; + LastUpgradeCheck = 0920; + ORGANIZATIONNAME = "Sunset Lake Software LLC"; + TargetAttributes = { + 1F23933F2071C12C001886DD = { + CreatedOnToolsVersion = 9.2; + ProvisioningStyle = Automatic; + }; + }; + }; + buildConfigurationList = 1F23933B2071C12C001886DD /* Build configuration list for PBXProject "SimpleMovieEncoding" */; + compatibilityVersion = "Xcode 8.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 1F2393372071C12C001886DD; + productRefGroup = 1F2393412071C12C001886DD /* Products */; + projectDirPath = ""; + projectReferences = ( + { + ProductGroup = 1F2393562071C155001886DD /* Products */; + 
ProjectRef = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + }, + ); + projectRoot = ""; + targets = ( + 1F23933F2071C12C001886DD /* SimpleMovieEncoding */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXReferenceProxy section */ + 1F23935D2071C155001886DD /* GPUImage.framework */ = { + isa = PBXReferenceProxy; + fileType = wrapper.framework; + path = GPUImage.framework; + remoteRef = 1F23935C2071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F23935F2071C155001886DD /* GPUImageTests_macOS.xctest */ = { + isa = PBXReferenceProxy; + fileType = wrapper.cfbundle; + path = GPUImageTests_macOS.xctest; + remoteRef = 1F23935E2071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F2393612071C155001886DD /* GPUImage.framework */ = { + isa = PBXReferenceProxy; + fileType = wrapper.framework; + path = GPUImage.framework; + remoteRef = 1F2393602071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F2393632071C155001886DD /* GPUImageTests_iOS.xctest */ = { + isa = PBXReferenceProxy; + fileType = wrapper.cfbundle; + path = GPUImageTests_iOS.xctest; + remoteRef = 1F2393622071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; +/* End PBXReferenceProxy section */ + +/* Begin PBXResourcesBuildPhase section */ + 1F23933E2071C12C001886DD /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F23934E2071C12C001886DD /* LaunchScreen.storyboard in Resources */, + 1F23936D2071C2DB001886DD /* sample_iPod.m4v in Resources */, + 1F23934B2071C12C001886DD /* Assets.xcassets in Resources */, + 1F2393492071C12C001886DD /* Main.storyboard in Resources */, + 1F2393792071FCB1001886DD /* Assets-iOS.xcassets in Resources */, + 1F23937B2071FCDB001886DD /* lookup_miss_etikate.png in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin 
PBXSourcesBuildPhase section */ + 1F23933C2071C12C001886DD /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F2393462071C12C001886DD /* ViewController.swift in Sources */, + 1F2393442071C12C001886DD /* AppDelegate.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + 1F23936B2071C29D001886DD /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + name = GPUImage_iOS; + targetProxy = 1F23936A2071C29D001886DD /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + 1F2393472071C12C001886DD /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 1F2393482071C12C001886DD /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 1F23934D2071C12C001886DD /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 1F2393502071C12C001886DD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + 
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 9.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 1F2393512071C12C001886DD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + 
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 9.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 1F2393532071C12C001886DD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; + INFOPLIST_FILE = SimpleMovieEncoding/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieEncoding; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 4.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 1F2393542071C12C001886DD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; + INFOPLIST_FILE = SimpleMovieEncoding/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieEncoding; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 4.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin 
XCConfigurationList section */ + 1F23933B2071C12C001886DD /* Build configuration list for PBXProject "SimpleMovieEncoding" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 1F2393502071C12C001886DD /* Debug */, + 1F2393512071C12C001886DD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 1F2393522071C12C001886DD /* Build configuration list for PBXNativeTarget "SimpleMovieEncoding" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 1F2393532071C12C001886DD /* Debug */, + 1F2393542071C12C001886DD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 1F2393382071C12C001886DD /* Project object */; +} diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift new file mode 100644 index 00000000..14a4337e --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift @@ -0,0 +1,46 @@ +// +// AppDelegate.swift +// SimpleMovieEncoding +// +// Created by Josh Bernfeld on 4/1/18. +// Copyright © 2018 Sunset Lake Software LLC. All rights reserved. +// + +import UIKit + +@UIApplicationMain +class AppDelegate: UIResponder, UIApplicationDelegate { + + var window: UIWindow? + + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { + // Override point for customization after application launch. + return true + } + + func applicationWillResignActive(_ application: UIApplication) { + // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 
+ // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. + } + + func applicationDidEnterBackground(_ application: UIApplication) { + // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. + // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. + } + + func applicationWillEnterForeground(_ application: UIApplication) { + // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. + } + + func applicationDidBecomeActive(_ application: UIApplication) { + // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. + } + + func applicationWillTerminate(_ application: UIApplication) { + // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
+ } + + +} + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 00000000..1d060ed2 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,93 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "29x29", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "29x29", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "40x40", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "40x40", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "60x60", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "60x60", + "scale" : "3x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "29x29", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "29x29", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "40x40", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "40x40", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "76x76", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "76x76", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "83.5x83.5", + "scale" : "2x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 00000000..f83f6fd5 --- /dev/null +++ 
b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard new file mode 100644 index 00000000..e411a78f --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist new file mode 100644 index 00000000..16be3b68 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist @@ -0,0 +1,45 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + LSRequiresIPhoneOS + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIRequiredDeviceCapabilities + + armv7 + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift new file mode 100644 index 00000000..d7136df8 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -0,0 +1,118 @@ +// 
+// ViewController.swift +// SimpleMovieEncoding +// +// Created by Josh Bernfeld on 4/1/18. +// Copyright © 2018 Sunset Lake Software LLC. All rights reserved. +// + +import UIKit +import GPUImage +import CoreAudio +import AVFoundation + +class ViewController: UIViewController { + + @IBOutlet var progressView:UIProgressView! + + var movieInput:MovieInput! + var movieOutput:MovieOutput! + var filter:MissEtikateFilter! + + override func viewDidLoad() { + super.viewDidLoad() + // Do any additional setup after loading the view, typically from a nib. + + let bundleURL = Bundle.main.resourceURL! + // The movie you want to reencode + let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + + let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! + // The location you want to save the new video + let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! + + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] + let asset = AVURLAsset(url:movieURL, options:inputOptions) + + guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } + let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first + + // If you would like passthrough audio instead, use nil for both audioDecodingSettings and audioEncodingSettings + let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples + + do { + movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) + } + catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return + } + + try? 
FileManager().removeItem(at: exportedURL) + + let videoEncodingSettings:[String:Any] = [AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, + AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], + AVVideoCodecKey: AVVideoCodecH264] + + var acl = AudioChannelLayout() + memset(&acl, 0, MemoryLayout.size) + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo + + let audioEncodingSettings:[String:Any] = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 + ] + + do { + movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings) + } + catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return + } + + filter = MissEtikateFilter() + + if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } + movieInput.synchronizedMovieOutput = movieOutput + //movieInput.synchronizedEncodingDebug = true + movieInput --> filter --> movieOutput + + movieInput.completion = { + self.movieOutput.finishRecording { + DispatchQueue.main.async { + print("Encoding finished") + } + } + } + movieInput.progress = { progressVal in + DispatchQueue.main.async { + self.progressView.progress = Float(progressVal) + } + } + + movieOutput.startRecording() { started, error in + if(!started) { + print("ERROR: MovieOutput unable to start writing: \(String(describing: error))") + return + } + 
self.movieInput.start() + print("Encoding started") + } + } + + override func didReceiveMemoryWarning() { + super.didReceiveMemoryWarning() + // Dispose of any resources that can be recreated. + } + + +} + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png new file mode 100644 index 0000000000000000000000000000000000000000..e1317d7819b051663709c484c7b8c773e95ed6d4 GIT binary patch literal 202596 zcmV(~K+nI4P)+#`c^?xWFI$z+lRcZUND3qPs<=fC^kzR*`Oh$If2R-i;ei4J^_0WbK4 zzwj6Jf-mYX{2%-A3m?oE9&k_zgPD+!RA7N1uyB9?N=TUXVIe>ih<;uW0U)hc?SK2@ z|GxhFfEW5g2MY8ufszOe`HLDT;e`f#;lHR~cu>FKKbQyn3%}N94QenCIH-W62Hx^J zAaE=}EGz)?L-PB9kl|he|GtF(>-rx85)SeO2TB-L!q`Xf3-?ZreG0#*U-+Q@1HY)h z@E07^FXq5sIQJ0@2%I=6*3SE}1bYAPJ=Yda%s-d#-1+_Z|GfUkfB^vq40OO@0u4ei z@C)xFAop(nq6YpC>I?p&eo-&{g%9eB8E`NIy_kU$g4q%z^HwMJ5j;fe6X_KJz4P_o zI)eXt{!hKPg0baKbnFNFbb58w2lOxM7k=R{>M!bF)B*pZUiiiQVjd9qSo>dzTbBeu z36k0>)JNcd-`!iC0Axa+i?FZ5e?9-_mhk?1KoSF9C~>YuIpBr+2)>s7^ewKx@Gs_H z_(gr;7uAxFwS-%E2Ng>#rXW8Tp&py+$2R+ZwB^5lCgA@#|Cbi~{_Ne}>coLsfCCS7 zP;F%XqF(UAznK48S~S1#FY1fBB_CAcf!hkj-v3zF!FxHk{C-OQuugph`8WcA|NZ=5 zZ~f7}qT&s#u#K={mkcfxnN?j*Z+%`OUr-j!-)w);j0atRUlMolyMv2bvF_p)B(Z=ZeP{PF}BA7J#|G$Ot$>- zF_C?aZS=Ku#E&KL=MwIpKjgpnqL9I8T7e56{U@eDf%~{}dD*p)gX@64rNYS!X0F4! 
zZGCUM3`^t?pw$Hg@UH&j(eEhWj{M!-usXy#1wEvK{wUNtSM{NuX+eJ_Z^;+(^ z1{}-KSNmEm^i$ht)|Q_}pYag;r}adXiFK(L2KwU&4Di_hesv#P?Qg4?L@Wy_>_Wn0 zJG?JyP%rA(Rz8`x%!3jYs6Fmp!u|Dgoj)r5SjFdGJ#WEh&j(@P+VFuYPz8a()0uF& zgsSxR0!I@SHNn_aQCsu86=J!8ShuGkXFq>ikw1LEjiW!0_lh=xdnniXiBxEU!bY!B?p{?oz^)DUDttIgZW&W`K{x`dGn|?j<&ga4X zZMaQ;-_D7`3Yd;qmq8{?6}VK(^-IgeHo9!b++efXPq#l~!RNlf+VG97X}VQtH3TM&HTh<<)Yz&l3x!|v}DLApW1$6_w)X*-MxSA^g)1Ij8?DCQKASPKJv zVE)+Hc`$S9fB$GBxzdV>x$Wss>%YU|CI8POP~eS99}evPC`be@%dvi^@U(+5e3_Uq zW{HRur8&@GjvcS(^2aUz?60j2@dNv>-QJ*_A4eb)Ktq=;-@_A+%C)Q$pcFC71B#ds zlr0m5XAtNtztEdFs5fy;C*d6dctQU5)U6z~jppqGco@^$`YcPjjg3hGAG`Ud0bO3i z+W=iKjY5=7-LP(27it9x@pcU|nE`KfO%5=+@UfuYCyvnq)HeuPO5fYP*VRTxaNSTE zOvXcO{b%w&*R}A@)$aFZztA*0GN_7tR~_UxQ{+i~&lnWUxo**{k1E`uZvVA*PJXQ7 zgL*b5+J0AC{`)}mpUb}v)s1?TsO&;%8x=A}vco|5wnU(q_@kH=s4YuwnK$9!O(ktL=+xX1faZ1m8=KP`?PLVu3HY^cQ#B&fMm=!2=VUuFS?E!j1JVp_3Y zqj*>tmdEXFpF0}9He>U{d+FnPMYeNkJ?(u>wDqac%f10O+xtY|iHne3=w`It!aL{Z zxa1Y(E0-3nt!c}?$-UZmf5Fq-0r3-({xJQy>680LX#f0y;Rc5xYQ>D6HVaH!MG!_V zS9>#;Q*V>@=@4~>4*NoBA7yAQyMxEu5-r~9-s{_Ou{DWXWs=3UH8rBm-s-u^wQQ$A zwhX`FtF_jYo!AYf%NGj!vJ(9^veGH)pBQ+G`j?r-z;;JlcT{^v!MD@;&_Ub!-)C@Z zParQ9j|~!j7^5wJFnb+PSVXlW#brSVpB7d)>>~Bs4l9h%5g|JrpjqdjV@z)w`oqfX zJ-?sPw*LEKNyOHb@a8QI;u- z$*|8Ns7lWbHrZciu5-SrTWdkN+iCR!p*Ecl=nxNC?qR~c7@7Z=&)21JiI`3;#{;t3tzYV+XUXAt(N|zmVJrG&bR5;j>+!j2#})< z=naFkQ%_u$*lrA_Yg*B4L-~ykGp(f`ZCi`*;y*i})Jht$fo5%Id?z5c@-G+%DMF|S zbwKzLcx(NA1pcW%_bC(O7H*YMhF-lQTw|ue@6=}jUfSP2zWIj{kCep^?9=p=q7we$Ysr3sO>^3-tc8EtR@o6L4Ma2#LH6<73mo$O%z(Ux^CAMJD>4z&vRen zww|%AXMMca^Yf`dohr)3?^Ov9mH#6(N?nS78@bfo+(U6WbI-8kbg6d zpJ@K0M#IM~-y)g|3kp|(38~zsh>2xgP~|D*jFWlbXeA-qmv$^TH5R+odhVO}mS&I1|ESxDh_ zz%IDZQDt}&hwIx6`yb-v4C>nvCdvb%&WnH<4qS;{^41~I$^8`_FPS5Vgl&E9AZ&#N zeH>341W<=IsL*Y38pq}2f)@r&oW%rN={Tux_hW&FLy&elRHQ8%LR=!KncER+r<9m? 
zSaU&=lNji>o+9J2`5L97*h&Z+KOd=NoT^M4=?{^81wnOQwFgBCW>Ip^7KDHIdVAHrq zMwSS9m2RW~9hR*Fr4_R}ZFZ6P5`&t#?pH0g45`}CZguL*8$lGg6=oS)ZTWXhymUWU z0Pe?!cM#xGyRo1%dA^)-4We8VGQCU zt_?t|m9pBaFYDjt<3>Euq2!I{ege3U8)JCf@WG$GhG{a5+aE|Pkc_Zxf1_b5N$0n( zFXHXl>B`F-#Pm#4j%gRVHl;W_dtCAW%cXKfV-rhhC|jbIf0COpW8^0S z(5@r7F~u!^U#VP`SvOx6@D0DHnbBNUY5BoU-ML)UNo3VUV4o(@h03}dmkE#XWz>3x z%dImg2q+myJU`|nAiXqc9mkzSdMMVtfL5BkwZCx}h6_xLq$99cCROPQ>%uBs#Hcz6 zQD?=QI;=N}iUWl+JSUmVs!T2y<$xIkO!Y-|`ObNSAM$%6(5LC-hfCNIK-`Pj_p29L zknUO#i7Hg?loFILB0NtE_$~<@@Ghm59`e<+Lwc4agN;*My&oD)^Pxo@-_~a`jt_su zW`REPKcE~qe^4#Wg%)NJ*d$D3JL&DUa26+x(2(o5Q`B!FDwGa}>I{V{gR2PD#Dq1m zNNf+wcIjgh!yE4O<45@QxwHP%6Knrg+vZ(osD(r$QsDLnhSLgLC|VNh9S-oQIJcyp6r7S`;NNzb*bX%zeMz1#k{E1tDX0K+e8 zxhd7E>1^uOvz5&fR%uFMPd66!X^+mVI;N+s)6L4o#f;Ky<6x>3vR&l_!WYC&nY4Y1 zPr$m^x7bNY|0Hvt7Z`xUcg)qgH_;njUEsI*y()(^t*~bi#D!1mSVrF$nxzvB*F~f$ zR}wTA#EbU+0x4_Z9p?}D)qZj$%qP;`0PN=4_*JW;A;+zV& zP`NIX_$&rdHBZ(pb*YHS3=1n=>?eb{0ArPrl+5{Idm>0bpoD&0gIn*}&g@W^aLA3< z*6Rv4osw<)8m6Ht$~ZZ#fnGLnTEjY>FT3wUnCLW|k(tiqDvS2Gq?vNrA(dxIpow~Dr?`M3qb4iFzr+pMf}D{1TmDq|5Bq;3P|sDE5;qZXS36A;w&1c7E-Q+6W*6ta z(6{xnRb%+PQt2qq>AaYMt{TCGLsS(>m?^MBG*C^dIuu?G;O3x%kLLWb3U-t*#3Rwy zSBtKh0l~FsG!o7>tJ6Wq5)98k-zFv>AE~*OBEIA3x>F=BC&l*z3sE(DJsO= z2)ZRNBDdw?^Wbv|pIp6H*^iBc?~0~XTDjTocY6<871)8looNew7sHYEfxex?mP4+9 zS(0LU$|`qadIq=zCCP>*&HEuX^@tY7prv`s7-63w713 zrH$iEPJn!UfLq=-_NN(Yh&a%$Hn`nj!kgZo7?ewM1!0{i&@5wqGqSFMzAF+RWY!!+ zm993v6;@F-9ivM?c2cM7Lgli}2x;Ql|G5=u`>=+esAP25 zr{wZzt5p6bUR5H(^>UQ2v%2>%!ZlD8CR|-zkhDifEJ8b0iTH@c0=5BCaRjkyDYqHj zIOq;y9tP+R(^`uf5?vs8!^`Vh6}UWM*wcDj!*%>ttJfr5)chWG6 zp}BP4rnK&yw(!bT-s=bdcEg}Qg5bf1NbS91r(tLqr0oNsDO{!rJS*h7Opw3r!0(D- z1g~~*!v-(<6|GFV4RL=ssRB}YJbT+?F}lFtI1awXOJvr%Tdx`@E_+?wT6cuUJ-!S?=F&Z6^%_0`F8!) zN76BS!wRe28^wT8)j}4HeV9d?%UKMIq?T3+qPUunF_P6aTn4#i9_v*Ng}RU_4+f5oxZnA9o)9jfQm<1y!1MRd@15*WP#dXh&B`;4Vzm(_5NQ z7t@Y`5}B+;7ipVOf5g7m;uXQDY_AKvguehBFp#aw8r1S$=&)vSlB#*3H=K^co~5#? 
z+JD4!C-AFxv`Adx83OAWMylE4mHzUIV+Qyq^Y()g>iyrBbi?O+VlYTC+kscp=$0D&6ryz6A!c!*x8qeV>use;>vAM9txkFs!j*(gF_W+*&z-9Wc>`xN z67k92iO=P|zXisk>ZaQV=(&<>)gW~%jIygLjqC|3Pv@IC?9=g0zUsQ1vt0IM6&!oW zT^rY&58x==i7~0sG-?3b3Oo~E$EVWy7SrI)i&M@o&;+7+mDttahj`q zIez2e_{TM@S^VQj>f0G+R!yXl0hDhT?N#L{Tk29%?__)Wv4njK^w9Zre-{xq{H5D> z*}kA)z;E-S!G&M8XyBIVjyH*_@zR`ip*K$JEFI`7)h@e~Q@(<5%|cPlb0u=tT|BU^ zmypLUhlpAZIe!+?ti;wQBkxy-V2ywlIGi_2*4+$tq1R&BIw#FlAf3gqPRF-3xY8qQ zew8n0_)21WilnY0vgWFET`7R2re?<=efyd~>@JqkC|{RxN(q`zg!j9G) zT*YN3I_=Y$o`byROsA}7emXO3*Q|CYiFK7z6Ge5fHL~UlU9LCrnsqvo%39=`tx4wW)OfqWD0gX4!b$ZF z@QMS{s9U)I2T>Ewd)0$P6oTf?>za1)Ga@cR{+nKJTGF*wS zis9>GsIF*rnUpJJIm^2)&KH&yYLuuXYB{j6t&N|V?Yl~40iga|F*Yl;-_}dvd)hc6>qfwXfYw8=ND+!g;`NdQPu5hX*5u@g=#hS8B zbe&Or{@c=>;|Q$-Xg6?UA3t%Bk^Jiu0&DBK?1JBLv1)9)9x-v245xh%1(!LAobwy~ za$cTq`@0ZQg}Tbi70y|g6~pBk;3kW!yf)Qc>csM18LOl(x5fMce;$G4uRhVcC0-kN zQg&A&cDtr`!&1|0a#(s7mdC6&`P;ngbRM2*MRgNd%2T`AO^Ui^tgEGl6(Z4Sy{~>SktU z-*U&>28L1bO^ph3U5ulyX4e|}RaYU`EINwGswsvj`c}&;obSN_?RP_rtu3qm?ZHT3 z2>-gxDt7b|z5RN3j?WGIs__94_Czw%eiL7_E_6~|0(X+bz8s^R zR=CCMYKO%xCOPZsg5|3m>LCkxcwW}&IPD}y&2n5dqiMJ8VLa`fjdIr)tiI#vvM90> zNJRugo7askgq|}k2FHDnpH#*&H9bc`tG4CNN!c9b6HQxE6#tC$yoU0qE79<(;z%=# zFcXCXEYEVLtBPQ??WzruZ?$eSI#)P(8=#Nx+LkmL0k!t~d1>QgKv7!*6V|z2DXOeU zco#8(s&C5$%K+i}f#L{`m0IHGLNf|N`*M@YBi3bKe=rEf=mWY*RNZF4_X zP%}5%HrIuqI_2BG5er`CB3`E24&qZWYF1F_TC8-u?d*ylM|w^xs#~O0CPuhw+lZmM z0F&!1ng-7`cnE1ty82vQ?tgj)LLXaMxleEDI*YXLRdgIzCha0)6$EP*RqC?i3!TKT zstaAEC=F{OS>Y%&Yo^k3-T{Xe^VB643kdBI2U?H$lX_k&2sqY?=4ZNsx^l^T3E*=V z>4vL2Ho9P?tc$GaNnBMOBAj?M5~{k89P$jas#~F|!dg9`oxW}S?2-b<-h38Uc)3)`$2?v%X4n3M_&s9y|dZnYP6y+#0qb|j}Jngb3HHoS@OVb&iHf7zZuEw5n zl5433T7i6BlRSM*v2qwOYf4$ZG`hgF3j*!Ur0@Jv=e;_6r{xUr&p0@4{qg7Fra2Mzk(H~H0nq$`~v;N4U>E;4K8E_4;0so%xnRU)!VWO`1HtZM?c zM6in2I)>~GWo+QET-g@!=A-)n4r+yG%RbGOr){-bwFK_;{IdOO0cF3flEgIe|5p7M z>utRqGHHJkDs@s^5Va~FC&3Xji=QF$#CPb$-LE>K_fb%1%cVS@pN_uOe|wE@3+y zXU(+alqNGmo>F?1O3UH7A{s5HRUG+gWQz@d^gt58YkkiimD3CJ?s@b^LT`AjU-Kwd 
zxq?06?GW{L{w~5wu2D|noKB~h5V~w~P-5yf&8t|hG;M|Gf&eermFngKv>Dzd`9q72 zPie%*2Hoo9J=4`a$x$nnTfWHdYXiNV@;r%i);IaG-=2wbMu?crJ2c)Ks;fPLL8YeU zG&g6qN-8%K-S{$}{j?7xq2B+^i7gFWKU*-uA48N|$7biUI=>C?Bjs7dIXU5~y6_-t z&WWxHxx$(*$f#OLMcayUsjGlW%^j$9d2EVba~F0$3RrJe{dM2mPLb--)3zS2J}S9u zIQDchP3NV;)Zxpz9PjcMo;9wzM#Q{46;Tco9z~_` z-9f;81b{UBT8_82X=MSrbu9h-d()$b=pKe+^_mBDy@k==j)0u}c7=7S+p;67K_8R? zC?zgN%&U~qHAjI~+bd1yE;KyGlAdQpgjST^F{T%A)}AfsJ6zf{qPI@99~rD_qawOQ z+K}isk>z*QK;M=eZHBfq2F5MQyX^^SUtp8<-ZkK298QRn?QWysFg$wRBKF_ zK$@$ihsxC%P&-;(9J-hFkDv1Mi0EJU?e$}72f$qX&+5jQ=pXoniO<8!N8FO|+Z$_2iLNae8kHPu>IkT>JDHY#E9u6y3 z1eTaNFOkt;$3mfo7u|hp@Q~(;02)E-hM&)!kBHePr8y^uT*%o)QHq)lNuI6}fw%AyV?*gDi^Zc_*+p{H425bQ&S~Q*hbTu@%?)5gbCV6Qx;^saby=YeTWN%k524G02-?xWx(czP z;7`*$<|q0t}*EDNQylTk!Lnf zu)D8o@ULm8BrVvx^m61S@G5o;b@> zuZUtw*_|1%=DM!`&yOUq)^d1wFF@3x_@L8;m-l^_*AWD}|Nf39y(ji{B2>=67xCK+ zS2&|B3uAc7O2T%yrX$^b9cD^eHV(5|{~fy-wV|ls9#eDg^ez8$|D)yCZHqRMNLRou|d{qs{Zw%SJ*@83|6%|_f8{lu9@+a5P`Ix!+H{0LP?V}TUYg;FFJ6QRfJi4j`Mga~DbvYTPVg>6g zu3%d|LCM`=nQHGQGsZ>_@n7Gy1KXOeD; z@)^g}Vstytr*!B3s?bS(*L*qtpfkJ3YIYUJ9?ZulyxptVA}{%S|9=EAwdHS7yBgx!t2jFp zDAig@z0KbWncwcjdkoK2PD7mKBrY4$bB?fQ2V+^T9*HKxUqMY(af}6k?pE1U-&n50@ogireA(%oo}nDmmpXK4C)qXl6}URG zSIU&JwCOEyKHRG2jT65AN#*!$~KrE{W0CizdBO9 zrV?=3u>!B<46qJgZ^hr`RDOFV`8~a6PnAQuT&oGGJl%B`jeYE0k%gMFE~@Wue40|( zjP%D_88q0Ps4eweiZd6K@ zi#e}k$X6-x)sulis#?k5*oNi?(OpgNoA6|ljLVp7dtJ60yT`HeDD2bD3~%quMNf-z z;9-AP4EZF|HOMiqiwIW_cu|!U(R1QDMW#VrOQ4!dW$r0Dxv%G*sKTxA-16@Y_Htv| zl}mH=rg~4>-JZBhPpx8SfE;i0#_t}cUkXJ+TqR}rGI_a^!x>DL-pe+XqO9)kXVIXm zp!<#hSTy-T2buJWCi(gb+@$lt{*pfwbL*U zd!&!))INeJe70j|^MTc@U&%SB!xf6N>Ng=X3hIhiW;xt%3@})Hv{jT)wK0-baZUF1 z>sHd;-*97qha3K>|2t2&2*;=06e@Q{4(*{VeKk~i6X3U<=$js4Z%M=FQE{@f5_P4s zJRNy>76)}LT}0|jHo|m9<}uf+)#>SbTb+*QllT66XGRsm3NqYj05ly6m|)T-e*VqmOpaH@?&vs`Puuw-xCHtU}CdCtyd5IspbVvI(Mkw=qR1Usp>}m;So%gFNC=m za;2wGDOI!gxL#mKC8|cG>rH2Fc*Cy8eZpttV?m$RV(nxeNYzxR@sAeG@d+qg| zAY)rqcAsIvwEAYA)``Cz119ry9B{bi(6r?=vr3ZFnnvnzs^!ugB=ps2B(^8&-0Wck 
z9evKg_}I(ZOmOX(Ve`3vjO^x)i`j(ll;x=6gv)vp-P7(Tq} ztEz~kDEFjwjh;qj@ES1txw-ocY#$3+uV4_fOA_}TXj|IM$HxKhK*3tI9Ap;}tIq0i zr}0G)uy9pRv2|3 zC<@YbEngWux*NHyklO%l1KMwG_VHhO?0>Kj&(e;)|BX7A-g$>I_}+g9>+Oa*WPVvE z@vi!3XqXFqmoiC3mM6@rehY`j{qAb?R+lQ%OgdJr-qckHryn!mq$dE{@^cURs1+>- zcv^H2yx5v97WqSq!qg}g@wWd#!}^ASPWNOH*NiiY>JeL-CXE$jO_V)Wq$!>V2R-+9 z4T)PU&_g4`*K$!zesle8li3*8^_C?$|UqmH{-d-D@pVsBYvZ*=@bpuhL|i^iYmCpjjc5mMlk8-TN;XdwqhzBzD+Uh+$#%oUh4(D%;Y8zCjNBGHc+Nbp< zhCRrWo|n=^Ad$YN9o?ox6{(sF?Q~~}dXQCc2Qjf3&v;n>z2QF4k0V+8-*G_K4Ef&f z?IiB6a~rG+urS~B5k!~)r#)Ej8E_KJE7UAvSAts=v%8+EvQ@&-^!h4cTD*CYu?lWK zJb7&SpDB)HRX?vgw9SoJ*E#MrecV`M($Wd185O^Sn1uXVowO-)+2O8XjdGPx4RHl_ zMTt~(I%(D8uJEN+Ax(1;ciTLz|9i)7FmRzdeiNLL;K;jE+tZYdNx9LZ)F7 zV=rOLDPW0;cGIY-v5ptHM9YdEj-2J*; zOq}U*j|y_n4P9k+i;?jB7-ZR1R$y@O84~r-2A9UN) z2R=N6XII5>_ghs=R}Ww~t+#S`4(n2CUh_~?*Z8Q6GQ(v;o&}*nm2*l-&5vQZ-R3RY zeOnK;vwob=T0`eB9;oxvadNATcl$@zuJovZ({WP2t)o2wGlr!{;MVBy! zTqCN-o#LplT+XPuQt}wL>xwlMSi1(1O{Cmw=zovAlf8Fp+mBmvr-;jI&}rFov%cP! zx)=LKD!$2JzN|`3^39dbio)9*(Ig6t&2Mg@RL(sG9|PG#@?R*?Y;apYs2q zXI*{W1919fJy&OiVz(`dHO!bs{XBl%wKOy-Gc~qTH@at=?rqTFq|m`oSH(u!j|I?6#!-A-}f#YlrXZ$$onEXFN#J zCx+Nd;Gc@XX@)(lAzh_Qt2yH&WBQUr;TW!RgsRsPLXK4cS#zZym;4Ps>tQ8Xbz6@m z_>*|tGnRKi)6H9rmuo{*SaW}CR|maIL-}o9Rfjm8!*eLlp6+!NRY^=06@|+gqC9uS zecm-Y(mwFTWj$`{lK)1W@lmo>_O`CASKCBq4cjhh&7mfEr1y$E@o$rkK@R3G>+Kkx zNlJQq;_hV|;hIKN9nn=%EYbHjx)tI%fo9v&b${>uUq3R)>f%m%+x@(@8$E2;(c$B< z&dwM9r|Lkz$x;47Z)NwCgzHo$=$a8Bd%EmA0?%0<`%O!G{&&A|GO&~IJe=7Ucz6^` zt>Xq|>RYg*l{*oC58CA45Ogi(xC(xwv@dJoS#eQ=?%S?VQ~!#hR)wgtd$Rj{x3|ZT zoyxA=_OkN1xa4MymIn&}~ZC0ekIkwWk|?er(4k@a4Fe zRTSzV=0dYVs;)#74n@=~QC1ed3mCLg`sz+e?Wk`lGoMmoz-;-sFUP|f=m~n}M%Q;T z^mGAPGW8%|{;fdwAAZngDUYcvbFX?%$ztDw$ z>LNv1skWYrqT4!h?=<|>e|gbEeeWsOI|6AnA}uI-zu@_G-FkSVFMLhDN4`8?sHfe@ zc`=7&#dNX9>qf2aZa1D-#gliXMth~5Rp}t0XGO*TPuRaLJCE#2V`wId8r^fQ^*@KN zcWj5l_WQq+SGQP5U*JHH96hc0TCE{b6bgkiP(btVDl+K^QHdj=O0()-S7OYXkm{^M zv@_fr+_F>ppRgX_rwfb|B-*e=&zNq_+6wM+?(X0pi$LW5Qnn*EqWEy&KN+HfL}@OS 
z<5*_I70zOo7Xz7%k2+KBWifT}T=p0AsbuQk;-B0S_hBnWh5_aqpsyp_vgXj1XjDvI z=9T(@BUlZVh$Bb##Z^_@KiOCc-Jnab)?*S-EIv{+JK{efeH>f-k{`#(Fy**1C}vzb zqL4;j`cT>Y8jkGp94oNO0aQz-_~jp56C>549>ptQSvp-!&%60HbXW{~!Qa3E`yXus z{}v9}R|@~>D;gG42bjUlwaLg^AS1Qz5S>gi$fAt0gLypz_zBmhPSYkq$7w2`*;}cmO5zt)&oqVt z|Ebdoe(BP6Y!3!`7N&g%JSeL7;spK{{|5+X#lfSj-Ofphzwu z#>Qq_vTNLk+L&nk$FqS|E%2VBrr zP!S3FXVGCfb#$2&!-$N`o@hz=MIYpa#UBo;3Yv%jHk&FU3dEE=83hVYvkUy1sKO!s zOZIbk)OSQt&4LIlH#eN_#)H12kjW2={wu+5V79`}(zmeVIfz1CJ!}vBqaf@#x6=~s zBf*jBo5W+~u>m;2zrnv@AJ-{;!M}X10|wt%Ticu`ZRM;zQ#1w>2`(%_E@y<{RP{3| z^BE{J6yW4v8~EpdEl%>2sk$^9O%dpHr}97oNqxcpM|QwZ@h@3SsWRzr#t^qq7*6p- zj5kosrZKvFMK^d0c|6N!(84E^VFwQ%Nr5tJ1m&Ovh68YO8^((k&lmZD4*!CGA&%tP zk35?IyM5scbRrM1$+q@EZW-l{wRB!Ka+iUH?zFPV?7|)nuP(sxcUDATJ|tvN3wFu;87ldD(WDoY8X&I6j;CG&{?UMzhyu0W9the_NE=~n_SrkhC8xJvTR8f z43~1>(-B9DQ2?!y4GtU{^i!M&QI2XbO*OS$2GUH9*nmI8!C$~3Mp4;E;fgKO7T}K4 zSeYM2^EE~X2+xs8CbJOEQMuEOi!GyM3AgqhBCy(uNkN-jWK)PoC>f5{E~ z5A5UqhwQ-K)kpn2c!KblKLUX+b8G@8gOqb)vXrmlKdH2ed(#MQ_*EVI+K;-(JRO>3 z;i`g9pWx#2dmI8AJb_b)aXlaLD?6YQIGpQal7|6w(&$*r9g9K;Q$>@4a3XCOD&#GV zK@R%yf&VU}A+f_OuLzeeQ=!(_KTsUO6WYKRYrCIW6R~4T;+9U@%3glKXqC>{AVZ^t z+E30-ahito4gMoeYOtu6(?H8Lggi|GYFa4`{(S5(A_$j#B>~@!154N^@rkq%5Y_Yt zDA@^L(UeW_y< zx7CyjFMvF}DXf*L6=^doqHgyHJG83PE?OF)y`-X3_zU>Md4eaF?ZW%LXra=wRwZR1@IG zv%mdcTEO12Z`9FO(nz9*0f89YX;cmH*gJ~TgClQ}BI=wj{P*@~)>=Zr)LFLlDY>*L^Gl}17{B~-RYk97kzsNPvI}&gY`G|(lX!QgC-(6@-wPX zs7PZY4qz|%Rzwl7q%%W?IqawcSs`a!*`=se3fh7k>FB507a)}rPjoyy`k5kDF@3l(D#;$3a= zR78dH8Bux=RX+;<{&yA;@NX z(i{G?AWf~wlKxrn6QZ=rm3(l)V_!lEIq z1IfH6r1Wplvf*K?fuH)da#o1W>Q&rbTJcWnk)eh)Zm+>x4t&84{*_03L@`Q5jmN?n z4QM}6s!zbzrb7$x=y8cU;svuT_cc}xYyIX8eo!po4s<-d>ma%+aqvQ8zB$?f!5sSp zPT(v41zYY0oU&8M!XJP*fG`@|usk}xM0uin%0VkXB!1j#OM;dz_<=g?5)EGgSlAXx zLRbU;K_OQ1bB(p^n$EIef6Jo|<>OgSwzNrpGlXd7@)-1yXENY8xW5h=>kw3G;te~- zVOfB7L=t1rE$RSghdbMs|DA9BZi==Ni(dSH10Uy?TG3`>%b7M2U4;G6PuFoQ!lQr( z%M9+YmNI~kV*(39T#jD2rcwuuSUVfO&YwYe~pa{Un83A zpE>s&5>LZ2o=ytqu@2u+WEu#Uw2enO_z1^C9e=Yzb_yUJx?w)qAV|m~^2IbcFIN)3 
z(b9bFwbNhl14d@`G#+k_I16S`j=nQ#u?RUF5sQGQ6Yd91Ex>`2EgAgQfWPAprSdM0 zKqq}Wr^i7{???E-8+5nR{OkN2+KR~sbZEO7WikIna*Do)ZDrk$`aoRbCUV-5C=FaE z{geO-;o71`F{Q9W{}vC};d_H}Ix>&y&P_yTyGR1(jMS9aHTO8N zSsXdAR{V}3|o07 zH*&Sn7NQ*_mB@JDKYr8L%d0}7bPihMUiKV&Y*GU}w;=q$3(VO4`y_9U&A;Pu#{zBaZzL_KQY&^qgV-JMa(mWQwDW! zWB%iy7~EfMU_;;707u9P7x&g89OiI$5$1$xM?rd>b)rC8HZB=yAytYL5O9DW-{$S%urJAxwwH>X6HolEFP9`k)_GuMrWaaEea)=J~UcSkl{t`NHCISCN+ZrM$Ik@iLzJ|g8!*v7K}gZCL4@2b-SA>S0IB{P{kROEqMtMuRD!k% zJ5<-$f?$f=7rkhy9}&xPM!#$QArpKfYzTA74~z56Tia-Y-06#iJDhGn94YrK0RF7B zBreauG6#UvuDN0>8>4l=q;DIq-&@gp(1i6j^aJ?DW}eA%z@zP$;6~px6O_ggor|e~ zBkq7H;fHtt-Z3(uIU4J-!Y(#wK#plwcRLGRqFi1Kkap~(jebnStsgu6NOSp?zA)pt zMSvgQj>B=fbHnVA`#yPv*oq2N=48Pf!QLjPV^`in$blr68VGS?l|G$x{?J}_C#P-z zxWNr`?XPHyMs4Q-^DS9)ozaRx?Ti*F*6B^V(eI>6T~VkbB#sUIcX+{&ma;mQ_{axL zoDcPqFQPkCU}OUuOMCBQEo_&z*#-bVh&asS%3ke)J0io!)Z^Kr#Y*f|wdZlkq{6?q ziNs18Wg%2&#Y!~a&4tVGU)P%w@R#@_e90e0zxj1wo6H4gjCiwbzEwxFq~KLUHajic zL^jncTY9v2$$@`%_HQ^V8KASYCw!{9e#Q`?#T1N5c@}|m=Si4HyBatx%Eo}TxK(y< zlsH9Q8{>Z66DRF70yy}|U%4I6sCvrc+gqmS@t6X%sV5#8j3&R(ciW;H@DWeRh_fot zeQ|??li0zHzAfj7l-ZCR!AhfNl>|Or`dKIL0%rxk37qn1(ETJDj`y3eRw^(j@KH>Z zlz~Rt-UOpr{30Yf-9Kg?=aLlgEYRi7*7ZT>s#Gw5Pj$u5WNT8A)QF8#WI{EQ7E|uv zwTe~n?<}%7s?FDWuGfB+rh=E{AqFu zcDY3I+up}(fw+iGe}hY)^a)=Wis9@_<`D!1sb$Rggc7JYw{ z?A7TzWzq5{_9ln7qU$&$Vb@-$SU_W1!-a6WGk7a7b}|q6x-IDYu>-zilG5H{v~6&T zo5pAjO(4dH8||8v$U^()+Vc&3&Z6$n%po*GZX)?Y!$@kM=5EiiOuCj)z26QK{Ob$! 
zQ}(G-0!tpkaD{m5wECv)r_Sqag_?Gb{GSyH}XghzR_bU4n%mD<;%*J7ckoc04B0q7o_t&Mp% z6CRJ2K@Z*aczU^)&(Fx=oky(dbb%0rHyaxuDrrjE^GxD2-o zjba;;WpXd?>6iZK7Q(U0hlncG{F)1oud(qEU*J9Zzw`DQR z&!dwEA;Gb>HZ#(!n0XE%k9uWmbEFY!(M~z2-ph=LuAyzER*l)Pqj9Yt7+yPyTAIwt zh{lE%u$^C;$v5bt93nHT7a3}~$SocCK&PlYj|5(~L)mO=(!Ui~_W)H)Glby&Md2Gwq--E23xCU(gaxMARIjxo@w?n}8TC;D~@i3Dbfs)~jxZ zJTSt#yu9?f<=psVGohNa~9NR&;ozfBh zB6ir?F;7Av;IlPZ1XDlx0~=)>H~!PLk5CvgX_o!3JB}A)42YNseDMPAR{y0l>h%T` zhhv1zb!Jf?583v;$gQ>Dgn14&LmViRS8I1v6N(g}3K?@~?NYqO2~t_w_xJBF=D$Zc z!tNjSLqJ3~H?w1%q8E8|j*4`nY{ihv?Mg93gW`}w8Vh2hN~<^5(0gBDA4|oH{%G<* z)A##RmFS!_MBuyl!J_s_S`3F*OjQi=5KEwkOAml1Y%19Qhy?fb8-P}BNLi#T`VIu9 zy>l;}y8xb>qE|ixrO!H+a#lU^NuXZRQ*1&}Hs=@j z6#s5i*V$$Pk7DZ7VF_9r+YfOwHwdMA=lemnv4q3*6)ns3kf*!5pS;KPR49H~yVPdE z-}la1Jr%8rSj-w`#fO$=L#nA+bO{HKshhz#4aq!;K> zSfu2%6;&J=#S*OSpn&fX)T!Y%Y6PSO-UReE(|(mZk1arn;j%-qg#~kM6rpJK>AX_0 z2wR>W2h8Q=m-E7ZApZcyLGjwXv_^5o5-+~j9exgIb*DFtO=)T65fn5C*v_NSq!iWw zOxkHGTp^M8^7w^d(87Q1?bXYry|F&I^B*XuKl&|)aoe@5VuDJd9|U*HxGLu|MUD~_);A~F!2RX_l4mL2PziO@;|1}aJS*M6p63HT?gQd{+BMJM0(`)qQ~r7OkDx5RRGA7%*EJh1)UXJx`6C|b_aNR z>sIi0LF|FyVDmxW$%S+saGnbWOG2ckvFT~Ag{I^j=tWy&BcJG2;M z$eqV3rj(f!#la@6;T-q^iinnnNVScP;t0STzP33*<>cvv%Sx?A8DGMm^0|yQRQZM% z{e+^pw%|XcVfqmWU*mPZ96O72c`({?T%HYKt{q?jtl&dos0|=?6<-HzFAd#6 zMSzT0OlgUU{KoA=Lr>&4@$ATH#|*JxP`C?Aa8NY5)9A8=T_+nrEW~Nz*!(u%)$j-X zdj(u08sRO4tinD>Wp>j6(?pxxI=4o3>jV0Ne<1RaglrQ_2tOdnOgzM#3#7Dc%H&yT z#<{_G=!h|@ynd+8F7qxfr(M(jOMj}D+~=KJ+RE@aX64Z(M-)FHT1sx|BH%d3U@N(| z8WdGu4Jf-gR(ltq5?_^pzr}wZ!G}pM9CX#eho&1|;S2~6yeQ7;$T?>2TA_>( z-O1vTLV!A$UaG1N5)uGtku=C0acr6%ur*yhy!Zg`xG$}jFsbni9ByACV-P_%krR%= zO%+g&9Dz%=N-#zgGSkLU#&=#hjpWS^!J#UD2n;*)Mb-bbLCq#U9C>3j7g<|7I*F%a zNl+%6s)WL3pC*H!0B@C84R$Kj;( zy@Q0zsU>L}fM^pGmBtLZXfT5CkB%0ug1_aXD*l7g5LXG-L$I{mY-lH4&3|Dxeu3o~ z76A}uzDN&qjqclwG4wMga2o}Pc_yh%>0@U0BC1H10hvC13~eoa{NRO2B{y*qdI ziK@^jV2vmz^JEfoUR${3kweXH=@I$N&{_uD4m;XIzbYiAy`urdv~ijs*8Q4k``GxF zN|*+R?*(&c4BUVsXOuH^rBKs3WHu6EQAu|Nf;=ihLOEQS0cH*5RRYv7_d8Sc^g>Xe 
z;=Uq8wBT0XjqS^|wN!P^7DsHtcZnp7Z!7{i72L{St>gLtk2VYZehd<&!+sI*51xES zt9l_Fg_~jD6F9&VI-H_>Q-a|N-A;s0 z;_iPw_Ju_@(}K=DUL{6q`q&niDH1R)S_o|qgN9N10Iwr1viKT13YkZNP$blPOYed>(m~zugEFM>fd0yxYSs5mUEJbR4 zJ}BfNi6(s=g4nQm+N;4czF1ZU%_8tr1k&`Yt)nNV;MC4I;uSQHPizfK?77`_% zn_>f=xe>=JMilnNr!ET}b~=rMM`>8hh>tT2%C_s7!2lgNvKk`jJ6BPy6euBUN|CZM z6bG*H<_#nUk{^#rU@dOeYs+%Fy}EkU;9G!ay0QH2u>a%65un;bZvaLA(F2c2AIB_K z**`0`J;4En3YqV4|BS-5d2zQbVgjda4~=BPmguB;Egd>LC5OMy+9LihG?3&91 z{#aC550&eGHDY=&_@l7Z?tG!~MP;K4DJp6R3BD5Vf{{61%N>gpgUPEEXzTx@z|K(5 zZM;NVKQ?smFGMr2+5~e2oL~EeWMqEH!zohoa-WL2y3f+JA<2!cq; z(CaG7q{aB|FX_n%>YL11RD?lP0R943&i+yuDh`bn3d8|z!$*pegH8cend70=A||RP zi2Kh!Fl)a7H+6%?rw{I3qrRRQlH-!LGS{995u+4|fEI17d!2prv6^?Ri3yQC1Xo)+VX5yln1?HK?j&c6>PjGV=Ee3s?RQz;d$9{ z4|@Mcoi+e|s9ToXu16pXG25Qo5LG@^%LFJpSu;S5{Cv1?n^nssI1B%A+YPa+`r7vF z9iN`~{Bu60RWV(+x8%y@cnEpQOi|C|K-513`f|i>W4_jJ4lJz>VMUCq<7BMuC;}U( zpB#Zf{0~y$3+%alyZ0sMLB`NVhaijGoe{EyD}j-@iRe?mp*8TI?4KJ`va)p;uUdl( zyy^J5dTBPoyiOCdc+3M&fFxNS>j6VuSe6I?7rdgDSARHNi)aTq!A>18 z31dFLgXg>xRnyqrfPevVN+$Y_pqAO;ky>5pHpWamt6lKAorJ|BEI3_eW;ie2Ks?;o zX~H)3$| zik^x5zNYI49yp(BBiEIv%|jSu#syiKtkgkiMyxVGn3o2YfhysqXm{-+J;DeQmU#wXmq6YrMtS7M3hGm~(Tt~Jqb!L#&#PV( zHCHi16h4}gy>Z@46X4nN1c;u!DYx6|VVstYwE7i7N%-RzLTRb+pDU^nY=vGW4ATah z;(p58hv~raMT|Raz#Usg+wL*V)nee*l>m*kL^ad4l%FDG2ahnE`;=ph$#@EyNf{4_ z`~Ama*6Ypog6;8j!55b+Dx^I|NjH~Isoat51|>E?O+y4^>I3D!i_X4{4~#z^&FUyi z?Q!6+?~vtHGFKcf$!Q)`?R>ajp>+fJUH3ieb{P@^!px|?xNrG_u^j$}`Nno!vxd@| zTh_Aw;p`_W(7z`1O>8@Wvf z$;@m6C5&=0ix6vYSB0cQfmJ1C)OI8cqM()f;L7Gsz4Z_=6R-*Vx-VqvV*z{da71Oj zTm&|0k_mC>aAb&3J$)?`!UtMn0pNE0He8WYmz! zl@S!C;T+6R+Jh^zo-+-T!bq16dy|WXa;mcG8ncAOTr<&^W>-fJB4CBLH!U zeyi+L)w3?IU!K`)q&%$_7Y_pscYq)J!pq~hy@~6W67r!2?8rVMbgeiXCP#Sp%E&q) zV5tuIBL8g3o~^2PmO%rfH{w$~PJ!-O&YpB|>oG#IUC1addmPCuW!F6ee z9|*^Rb!nvhcsw+W<%bC{uhE@8Ff40(&|O`*$MVz(RRGnNAedNCb~H`h&5XpcBDX6H zJMX)fy>Y|on?xo&^GZ~rqQUdAaE$v*Q47RLoj877U2|HT#490lcR|Tv-!C8w&nwN? 
zhewcMI~&ySSPt*ff37G#J??M%XcT%322kZ=07L;OvwVhab`6zM@etbX@8iYP9Pv)H2?DAm6L~{SY*;9WUGJ=Lyw(Lfodw-QjQk>SpzEAHQ_NeJ>tBPHJh{`(PhE z)@6X~gdbtY&=6zdA=pZa_wvI$8;Be@!a_=D%lE?9L6MIV8T ze1J+mJV-)g>nA^QeAEEjZyPtoD}lXi`F2qjlZa~G8q$`&f-hd3axVNYYq+jhKRdco zto;-$9N5Nyr{9^(Hzh2`*V)%paKW)4Kc*lN(y|NYjZ@JnKT*M-RDd=%x{u(YYt!ya zn7{C#qV>AK2DG>X@8G9Wr4n4Wb9U`>VShQDn-@VmA71YtAWXf@2@m%+5LIm zt>2lHbOMLNS7g23hKo5`{EWZYakj!4eS>x;>|-5 z*P(cQCHIHpMF@FgB}RfQ#&F#_9`P_1=FdGMvy%r}jZHq`^j8KOXh@Z%JQjsYMx!&S zd%)=RGwVWqRaU0luP@*B{qdO66L*kU_;0x}-7>3V8J%j%-4t^ETVyhLkq4~!yMH*s zURMqA0O2~jpzf8zv5?1(JhtZ7kEX_&F7iZ6nhv>bR~{ux`PGsv|K#f6VRw2)i_6dl zT~9IbJZWHl^gj5h@o$gZ#*|-8Z+qECE|H|MI~msL#?E2n7s#Za*c&3fnO?PeA-H38 zJ|0_x`!!8M{H@LvW0?l-7?f1M77+-!3k%#g4=mCY;Q^0Of0giE@J}5^{l0{`mfjka z%Wgw#V7!lF>gLE3w&WZ5Fnuf;<%*&*qf&1r1@O?CTLcV;z2TUG?7^QX+&z#!0kv_T zw~FhaL_GGwA-I#_0vH1IE?NwV=(VC%WCl-q{!zm@HPc%Y#5>K04*}ZO1ftiltXm$< zv+}LORzyJ36eyN6qguij3KeCn=mb|_B7a2}K~H9D$i2Yp%epjWMs*(?KJl|_6}=0g z1B57L&a`|Ga?VzCTwRwU$#^B`#7h*k9s0YJ{11ONjXf5QyWG)$U*nz7VTTiSnh=D0 z1ly2j!gl^r;U7SkPwy(;(0?Fo4a&j&f^}cWd_B#e$BVvT1B%3)1VmgBMj(SdfPgb~ zK)`P$uIAj)n;A6vePPfE@7Nxq@OrGf#N<7D(O*}=f9Bzk_%M39*b$}BO(16#c`mo~ zy@)H&o)Yv@z2%2JHsV1ik5$CO@8uiw;X@^`{>+>zDyq=vAe%IL-ka7KvDtGgi^;FU zNuz$rUSROIf1LqtvtjVhQ- zPwc#*jBA&k3Lefh_o{P`4=Mi;sVy}o01af@8SOqjsK~}m`pwi~)l!X@NUXZP%?dOz7%arRJj4I>Ng#+)6z(0Ve`vy&3@VhD93=-SzIejE^Tb#J-Kw`YJn!<=2kc^AM z?mBGJyzXe9eDSc0aSsq_;9*`ol}BJL%w}Rk_`3iAAOJ~3K~ywHDr@JVa(q2b8alf0 z|5fOTtDV#>ne-st3F?Fy1GlJ>^l6e}EbWqAq+2xf&662G3HDtD+=!eu+D)!@Kv8^^ z1O%0Zr?|ya&P;-N^Kb9v)Mrgdj|Hm5MPV9lpo7AOV2pD-LLF$Z1BaIeu%;Uh8(}`0 zbSBmwB8CISyubL|W!5CV`4h~XFkO`o>h<-W=ZIlO4xkDsLCi^>Pt0=Yj>iQLA<|rm zfo{Kx<4Hixn*sk`l%gW2?wnz{`9UHh>du>^h%+)nN~T^0s;~F5s|$I5xC5aRbaF&olmY7B9O-SeexiN&UbxhFE+s$b(VmM$DT$;E(z%m>Kp}7O)Ao%)$7iAC-J^ z)>*s~L6?OwExAVN+Xia6!=Y&2Qgw}sC;Mb()mevoOZq37^D>dmaGVutOzJNtu7+QV z7Hh=7{M(4e9jkL+;sE?48xG3xJ3?AFz$<(f8Y==+edr~eNO8!|_{BU2pcG@Q1i;MA zJURD42@e|UB~O9Sdo%-~D&cu9d?K^h4bEBiO&)nv^p!edAchIy728X_@CD6HEW9v2 
z0d2NKLH3Sm@P1AIq65)zA3zSAipqzx%(svb$4>ehP)=I!XABmdNXLtAtSV&I5VcnC z40HE}pZ={TEOTUI@su2vM+);00AZsWqNJYMsV9midU21N&Fz?xAKWEQUogW~)0*%B zzhcIq!{c~bRl&iDO?VyTA$z7Yt4IUr0d0Cqhq*_%W3v6NxJxI?`?85=wWs*oKLY20 zAIH`r_iAkJ$bA_I&|qZEWHw!GWi7A2)bN8#`|)PfHi??#T2OMlc(nbRUr#3>iqjn% zc4RcSJ;)kfneej&%>_=6fwaLIbE7fv`C!RL^<@y$mHsk@ye`n-1ik#hFf|Xp{Z|`Q!NH+z=(OQK-AdJFX)r>^(;WopJ6rogNCVCl zHCxpVI(d;|y&F29t5<>HhD~b6Z`9Rm_tp>QU2@0#$_xdcF8}6=xJQzuIncC$lLv9m5YH{#fH%#r6eL zBl{hR{l0gPvAo$>P3ms~=o^$a zy207+V+etNkkT3Jzc3;W1o5cU*tPgynezebR8w8DlBH#W2lk=T5+?TdSTc33Yjt9c0XLEDZgm%(1zP=sXN-#I=g}Iq{eWbkJ zn19gEgrun@q|Y&ehjZib`fk5D-M&0g6d-txz=2zuLQ8{{LX9QlZBzfM``<5mr7DE; zby-u94}d3fCPfwf&70u9J!I`qtx`DWi)XEXN*OnExGu zBNlkY6B+v?VDvTXh7h-U=(*j~bZ7)N!ma-7=dRkASI!-2o#E@sOpBxghj7+(GDqY9 zT4?^Fhu$J`{?f#0%k*AbtKUgIQvuHP_s>1v)B`Hdqqfys$?xcGses1ArTO_`CpR=aDQj6>#;PxShN|H zBaP9-G$jsdjs2~NWpz=?IPy|CfJ;`!1)c0;Z~cS)<_1lP^Vqv4Auo$;ZpVYDbiYrp zDB-`fRCB8`hlCJ0BXr)S1iGMlOPt4AepO2o^LWAR!>6%vF~-(plfKYO4_;@Vv)8Dw z8Fh!1{di+r|A4xN)?R4zs)q1!x5=;?cfk0f`Mi6cQH?+r4;KDgZ>c+5)LNd8w@R2h z!qZ9ZgceJp7Oa?f*MNVV_3WPqt zKA7}6lhP9jQ{6uL()fVzf%dn&1~O)TSs*MUq~qn;d}%iWux|FC`?tK{(60~q_-673 za$ci*dg*aZz`q`J*_&Y@?`MZ%MZkrxd*U~EdRKYJ_Je^}1#_o8@&1*El4I$K)iRhP z32$L9L;8??dDWVl+2Om};G8YmBpm#YT z0A6)9xBI`XE)0rlm*qiDt+$T*!ZGq0xZAisNDCxjQBISouRXpnG*<4ADzsI!^mvTj zdcujZz0HYJ@6NUJh4Tk;JZRjj7Ar?4CE5>E9rUA8`29-xQ$ZfI;y6ywFW zd@W;cp)cC5LX>$$5A_y0_=7uNJ$V03=sx&u9!kJntj|c?gYF;DK6pTYFW3oK(GAPu zT-`_~o;2Zq7@qGpZM=ARU>G*SgFkT>ThpO=&|CxKfQ247$AT089DtXfTVI|I6aHZr z9-8q%Hq8oYzimUO!NVH&=Q$RW+ZrOx<2^LWS3kKd_2B7>(7|o72*t z@c&Bl^Zuy=&ZHB^E!l?s*7>hXkCjIUL`9r-kM}&3v5%se9}(9v+nL z2Jp(sZeX4HYeHkFau{WJ>z7_ky8n8k`f&jUN}Hl4fi>;*20gBik38KmL!40BRSvgy za+okkFC`MP$G#>#^quYgE5Ay=J=-n5hzfU$^FjQRezc00f`wPK=H{`;JoV12(f0da z%bJJr@?OuppjWb9TlLryE9^Hj`~A~EzJ>JJ*R73=gBZq)In3RiFxv~Nhg5CAS~>UY zho#n}Guie1=Od8fKd_St*3LT`&0q$Q%DTflgFQB4J{~d@g)ZF}*fi7^JiMZ*-h2SO z?quhdoer}Kh@AK*QPZ~a8=Z=nzct_dCuKI*g+WxU2_Eb;{nUpH#oU4w_-8Vf0ie+g 
zFp%tFDJ(U1%i3W8Y2*Iu^``DC!q`XN#qpFiP3>#^76$nVB!O@E?=fB!i_2;$3qrDh zdL$=GGCgqnM)I`ACVHns^G5IcF~RQT1pe0sPQnal)F{EYVwf;#77Pt!Ly<6DOO*cl zEB%K?qThuZ6We*5=R z7WtjEb-$+u{t-H0SIaI>-E>qgGJk%)7;C-BblhJ|nP~U>V3GBlLa3cGnLDv}MvX^4)Z3sXknKy8UT>ssTp@@DFFg3g29q|N9C&{4CSPk|ZO0nR(fmrl@3lg+ zlK3m9FQoKjyJ8DkSQF+q@y~x6k_4}3FRAiy=rQ2$Yx>RWx#f=l4|QurHW-9dy$K+K z3+lMtdLjQ|Ao} zj=L?IiRdffI(WEGY4v?LF3aan+0g_XVLk`PuKN;>-!Q&jD{9zUP}==>A3}e+CcLl0 z>xa7K@-=|u{{w!Ta_6xTb2$$I^kC*+m-8U>-i681L5OGliVvcRYt9?|09f!ZgK8D= zFAvAB*D4+)G!^h%6z+)S;Mi$E@U?_psBVU7!Fo~Pk+L$FwCN{ACrb@m;e19jc&#!2 zY|2K)`Xu$Ha571A8@nj$zXQj+{$-h;a`(~*9-q^pa}I_kD9~jW6djhXOOv@UP5WM z{K_NnA6PWSGJD|PZVCVkDqt>SqH+6>u5(d%MeQEQZ$@ZBnkt#~CxCC^ zi&nFZ_m=hZHO4g$J%`B#2ds{KO$BgugnrS)6z&?&yu&~LoE{7Yo-vj%rPvg*F}tl_ z_c90m^VgaI+7D~)eUKAg7Z-U&Izn~nboe6Of+KIwltm;GEiOauSX_N7Chy#IL2I|Q)eiba8@=6c>GZIH=E9pKCgxlIaJ@d}OB~1JD{uIInz&(Exa|G=8HbyJti(fNvep zbbvuaxHCWt(85slrH>9KW0DR_t*{&pR7Z056CClAN2DOp&Y9~VfgdXTTMSrGrBkKLTy3P{|21L zFn^vF!QA@?K>e6I@sR8d@q9dB?KkHgrZrKy(x*X900-=hFWOXBcm*GN74PNrr;}D5 z+dqMy`b;mOIXR?BusLD<4u9t`ZUpgw{~>oCLT3uoNm4f8?6vDRRQHN7;J5L*IiX({ z{2@2R%N4junMolJa>pI=iE!OPwQiU!@OU|E9*8EI(H_B_{97;Y+;g zmDt$-2>(S$6TG=2lbrb5`*=zD$EOd@)o<9IM1}hWmmdR+tTiliGE@2|MLndyy{b{) z5=r-6oAi9#tu)-B)qSOp09VT;Ef6>=eeuK*{(?s z#^DEe*$z*+Gm*>tI}`PIu|YpH{+uDK2C}WrH=)dD3;=G5q2RX{{12h|KzRS$B0M4>_N2%I!3dV4Tnz*NzY5UDj=YISukC0_^dND(-fw@g@!VFw z-H9!}zM>IU8F3HE^8VLq_?=0b*GvsJ_vNu=Q}trd$$MWOHUv6Xe50QZo%0>MdOKl{ zy_j6zIGiEe$qNrX*YBusK4UEH^)~+7t^5uCOZrdxc>swQ+UH7-yTI-5PJ}-kcivx5 zi{1^o&C6Z!-GC(a-E2l`-tfCJm012SmA zSBAN>$({jL724H5!25NY9QPcsXhn3qW-y_<;`K2X4R*^nv~b z|3Lo*{}cWv{9o{YVgCvLfq&p1=z#yg4SvAGp*!FJ3^D9*6NpoPmW@Ij3EylQTT5v@ zz+BGx{r~%*)cG%#@Kbk-Tm1p80)D~&fd2vg8~PXazu^CZ{>1(l{Db`m_5u9h{6GhE zzzGEYEvJGaW|!Hu9gTc1h*fZVOtr6QGvW-``TgJbXjawy-$cBCpJD1+!tV!gK!0NY z0sp}N8~$(jAMk&{{|ozo{(uMYKkx-V;07F!VH>b=qGmMG@M0cSr#;lc99=}!p>n8f zWt`vtV-FF@8K$^DmsrgF@E_PgMSsA5u>TGJZ=Ase|Hl3U{sVpBf8sm}|11B+xG`T4 zGC;ho&d}Af<076|MSu;MCHw%0^ZS49K|FksP>01zn&iHJpxA;R_@B_9*#CzAFMNNX 
zf5ZRGB_tys@B=d7!1u^a?7_0AT~P4?s^^I#&nH*+uB@7A`1$?*UwaTwlMRb_j*`4H zBTdMl4-~Nd3H`wT3+I2q|HS@p>>v2Quz#Q*><@Oxq6Ihbz+Oo|u@Tu=gHSDm)vN2; zg5+YBk&`D<`ThRiZ}=PdH55`oPW5gAN6JZku>TkKe`Eg}J9zm|=!5+y{J{AK6f6+H zH~tHL74auK1QY;h#})Ig$FO7>qfJ5@#7Q!jt(@3tNSeS;ss|nLPx1DV zZ0QCy(X(Vq4d-s9>n(kzg^|!lE5U~)9FHYMtAe|Vt5T52Hc2qJHh92eTqeMkYN@3X zKJWz$UQKpgDehH7uR-zM81P>Co7B@!%FIhlkEkVRrDVn3IK{AMgKiu!kBRM`)CV3g zmah5$pHx(;3Mj3FGblqt&~#7UZpIQ@6TE6w>W}(;OElZf8Wcfr596qJ!Nx|GRW8ql zS+TN2$`Aa28T==?CDq%i!K#IP5Zf+^6QFv1)Es+JF|+qSMxqo~i98Amg>V~&o{u*H z6Ck*QTU?g~>A+gp3;Y2VcmPM1uXt0JW!=6v7Z@B_xe#rwP+QRThYes^r6K*t^fHhl zC&9E2H!)wY*~@h;Vmk=9V1yg+fq-S$p&J&kG6b&$9cc!&p_*pUofm?Byc+g~nytVO zE8fY$8F>ir=3(rSiW}mnTqp{l6ca;sbmut$gDvO>Jj&ENN_CaE#JY}hsm`tklHIN? zxEQgZRB5lk|G+5dU|g{Vz%$tR2*T7$5~|ihSg=pRU>}?Zz_J@Ykd*;pz)BXF5;b>Z z96q>gxT2!qwgviJSqy|h5B>H5@d6-oK&CH9=!BNw(N+SWi!w`FOr`OBVOs$mbW>Jm zP|Pk#9!3z#c5aj3F950X5lbAQmuQ9V9ND3P>yn--u8^{QnYrYW;2XBx52<{{D}fne z=zu=(RSMv4_3R>avq44$pe|TD_nLC2)IbU5j_U&AQV%RdR2%E8s8z(@eBZ04oO>$x*gVM$n-KZ?HS z)gxDqjTNUo%K_z4B($*Y@&IBBQUR3V}5k}=E_DW zzz_otW7tV`)fG3k`;xZfSUIT+QT@GA8SewKOA>_wNX6V%QJep5c-}=nF3~xRx@ae^ zunz|PaD?xWueAVIO6n8#iEf;5hiRJw2TlVQxQbMQau#IyW_a+j%Zv!7@aiuUl)op^R8D=#aw}+{G`}|JosfLQYp_tzijO93lw`X|1aj za5eDpvf-G@AdXzHGbcx$qpwBAIVBitoaNaa=EXR2xdL_}LV{RCPG!`}XfcF%mKXs$vJwGq$udn33F&mG~2!$Cmbe*vsJEq&yw9%u2l)5-hz&tw{r|EUb(F>m;>cyQXl?N zlM72?w)1-eA8CUG9Et|zDh0Xa+whn8Z--+^TW-MabI&tMWl8bjB(mbR-Jd0pSgpX9 zvZ^qXE7ui+o(MoWIU;s~9QwABQLJ7ZF_sh#oz6q}ihsy9T;g`$q62W5+`v`if2@qq zQVD3^WrSN8BHvs_jd($QeHGLwc@+OfUKg(eo+o8-74gMF=1lx9cOGqJV8U zJ)AlEUWTt_#51kARVDr<^y8ylhJUj$0Eev#I9M)W8W3d%Y2Ebe+l;6*AcsLm!`@*P zxw{~(Q(JO3O3}Mk*kT=JB3QB7X;`$$wu|_!G#S%_3ip?V@HC}cr%CGq7;z&n|p>C@alU0+3@HO*~Ko8v$;eH7y5nfpRidAXT`wpWAlm z!7got63dfg*UAv_MJ|(^IjOq`U#JMEc{Ko9>9#YNc{v?&nRE&`-LPG-Eys#sN-mi8 zjtFI${63uAB$}t;iHKqtuwe^JRoUEBTqaI3^O&SnMBYvSb67C}Z5yG<&k{2n zgPr)*xy*f^Sf|b{!`_1Ak_F)$j_+r;2Ns;1_DM_9rvA3fO8P7GM(M5EgVY{_sMKC# z#)%$TjnV;;O-md(stYOU>>E_M1XRq(p5*A{8!AU$VtG-0HLB8L4x`v>B$$+hKIH 
z=tSFdG%Ut&a!}bW@Yg6%hSi!kfWZqwZ_2e%Km=Q~u_BD)&=y_hI$gGPxtL^FTSX%lE7|S|e3}t=hvx1fPFLVz`|JK1&QcT3lwJXi5!mhm5!uoTA3l1glk9s1K@Nk*rU)cv+qFip_4*SOg6%Ju9 z8n|*&NXm~R3lhPhl)BqZG> zG5+w-yyAu}Z3E?S*wVKo-zLQU9j1ed3sy=T^RnQocWkJq)WN!l>i#HitW_5PNOa1a zq6BUjffJkJDO>U_Ksbd62Dxy`4i(8rs9`OHQ$;o&M&3p%;1yt(2iAc&B2{v`kJk*O z3v4FZ*|ao00FMYYJglLh+gqq6{)oi)cQC~c*!>y5!@3z;2$u|i0wM1Q@$j3ZP`aP6j2aNo_2E? zrht2jpy)xpn2bQ&CV1Z>qA5Ht51`o~UJ3wg14}_h28f`^4g}2uj=gZow(AQy2-gX0 z*>Y^iu$RK1R^|TSV0hwRk&plYAOJ~3K~!gA4y>R%ARNfJGs}6j0eu3kYXN}hefunZ|QilhF*^?i118E{~-PUm##P5&Ll^bMI%6Icdz^YPkPSYUH(!K=Leun_ssRG zsd2g7E~OGd5Cn+hF754iUaqqO@Z4C)XFHn9nio;_j=7^;30mFUo=lk#nMUoDrA)aS zJt$S*-s!R_qc0BxXs6tBA|_!)fvPctJ=Xq2vdSFT6zGXn)?oo7+U?UZR%@yRy$R@o zxGjoMD%v8svs{0ro=hnqOVy*-wYdFnJ}7;HPN7bpoi81S9S!I&F!Y~1)!Urj4phJt zJ*^8i&3CH5>faPbF&6fx^`4i)2y>3Ad`eQm3?i*4Pk$qP+Tx?Tp$A)N8V- zRQNbbNEYhGIGX~74R${H%7H)U42<4wuI)Tr7S&P{)jvI1fD+um%33gym7^5@P#`~2 z5Nyj=rrf2wZ3T5R9|5a0fn-032Ilyzikh6P4?(+BJQ+-7V8>dH_hdVlXC#!V_kZL> zV3utIiGWGD0}Vji?=Z2HAI+VPlA+ub8jwRKM!%P4t||=c1>9c6m|8rB!m4AEtRA(0+kF*#TMplUl(=QL>M z%?L}_YRl*EYVbJ)_Poin=JQ7Zgq>FE=R%jq+a!b#Hk64>iV#c+sG(LW-O)Ea zGz+hZ%~rKdKF~A4zErwH*P~%Q2%4|;^X-D(Nf=)1pJb2qSrLZXGulBwdG#B@uA zz~*#i2BIu(UPhe@)_xq?=is?^HRERSSj^J**x0lXsV4ok@o#c z>RTo7(b5!T$jnUHoq53{Ke-s<&in#W6d2-4R&g7BWneypKgOQ{V1=}S%qR2ZNn+ka z5Z~*UV@ecF{r@&~7E&wfqv#f|Vn&9EAt@2yC)x5=A;bl3VS$+@ghXTtP87>k0j6qL zHo>PI0^Is3z@e~a|DOQXPUUrJEuQtCtbUYFePXAQfo`^=swBpzdQlm`@&0@g8sopb z`y0ctG5)Wn^bkQIjm(i3Gk~3nXi~9aZI$Ok%tWG5|7aSUNJ&+g)#PUYaxBL{U=0O* z6hUX@IwsU>@7le(?qdB?$U}h5I|>w^mJUUcu);0dkxFb4LP%L6-6-gz10Vu&0V4;0 zj$d!_0njfx3&1kqV7m^$3l~8YRpL$J@n3iuAf*I$#e+0M+Txu()zX!Jge6;oWqivl zwMY}U$mP^d0WixD$*#+WWP~KLq_Ge@Tlr8yY@Owf6#Z91P>pKkWmsWkf&c*p5TJ(0 zngb+&lp*p0Zs{ZY9|`g$t+X%6GYPur;KpUTqtTpLM}jn1=7B@0x)yE?*sS%xDU?G= zdL$NT+0(32~PlNL!xD0{=Cv97?&V&4fj`Ko0Hxsa=f0;G){| zDgV~vfWJ;}Jq{-odxxwkrP{BhVoGUhECxj1P~;f_sl9wwrlUx^5K2mvH}%;k00#=y z`_uu1U5x1beHP|JhT>qKc^o?+^!)_qb=3Mfu1BbZtiWVNYbs!wJB^8qM2ab9dCUmG 
zBn4>do3|5YDU$sYK`E)h>zc!7le`}U_k`YAKTdw|YyXc6ov4CQ@ikcMjQ&X)=lYjfgP?r!P6br z*hK;?p_0%5OnHW?*;84sz%xV1u8o!S282QY%j0(*U%qqo_~gcIBc$=+rQee=yxA)a zqWfh)|Lsq&JnZ~m!=nvb2&HJc;fM%X2*{rRE~`qULq+lSe~U)XuFAbiEJzTu?Aqt5 zUM(NQ{4#sv(qBli{AOF^19PAbSuK2~s^*Rx=E`nB5=u&XrCeCV9ZAtdil&+uzbsMo zTUNF!<#gl{l$?qNrdC`E!Gu?3RZuN`L_x=su9rg<6P&aR0P|5Il4v63?zBb}f6`KL zfyloyf5ZiTvQ}O>K}BqDn5pKl?8&uz>qc1}aDx~1R%G;O2v23GhEDeS`Cflpyl7p^ z$1M@`IokDq1aAt-pM(iNqRGA$KjMP@iMrJ`KXOA@TF44Z7(5Xs7ox4D)}HDN7J?<* zheHgwH-dh!zdbgzURHigdtgbF~-3%jsKroGpbA?LhlyM3vj=%AGKeJ%T9m z#Pj;U=7lSXaoEgnEC*U2Cl7n+>*c>4(D_8a{-|aInujYWou#FbN8DLu-l`5KHh3j7 zaRcT3Q)VRcqqL&9hEC*g0j+#$;J?@lX53hmC3mBP8A!GmF_gqOuY{p673iZsF?TMfo09et%EJ< zEHnmM_$KgfEE(NXaSn+ZSzI1Ew>M!As%%v@SNtsA^Y6lz{ zN%K%k=+xkJI9ef+5Y{m1Wgp8Qb2%ruKc&7gYwj|>+F)4w5)go^d)exoAsR`V=LYac zRMKEfk+?D`H-Raa0^R1uwxP^4Dw5`1lO*6mG5{pJ>?XSid_wUFT`v`czGnRsv@{fh z84_e_w|D{qTqS%nSUd_}I!I zk(TC6Xi>p*AWadXV@aF+-VK?Z&ANk$#eNsWgPMU~k@))0^E)RBlB>N|F!;Eb0hy* zKbm}?S36#p0lJ?DP$nX=VLKyJffd@4m1j+PS4}HR%<(}YV2N~{1`nxe^aKk=tUks3 z{p;BO^$RBR6qVnRpns<(*FV{^IzQkN5z17+vm|^ile0{AZkY%y$F5ffM1rQ1!4#33 zjoX@kv&m+Dhc0Ky^{U76U$20_*FS$6bz;e}bSTrJe=ReRRltB?nHlxX0l`vfb!wri z$CI!O!s?6d0PAh9K3-Vm)YTnj_}v|8T)29*3HpDq4dvWI70PG;RvEOVEEN(rED})& zLfo<~-KgMYmog-mrJ~WtGDg?&rle$4LWWf!B(Utk4y;_*ez~6daNrsW@7eaJ*r!H} zES+Y>iAa#T(+CC3q>M-~(vlg8Eryt|B2*$_a7RGgz?U3R6G%z})@TlLtUx6SZhhNk zuYV+Qy{@x<*8UHSLMNxSctV1qmAFp|*=aKC>o?Zg+kurCz!q*{iIl!%Kk~nGYpPyA zrpzM~Zpc#Twy?DeQ^rUzu~ru(YyAhll_fV9L@n|aaT`}@Ol-*2gi3c9D8?fju`2`E zn-};e`3>CSops%Swrr;_q!J;CW+n7Uh-6X<(F&=~Ni$}`63aS|LHt_(YrlUvP&EOA zgKE#&6=UwvYbhaAA>jrg+ab#Sr2YcFg*$nHk+!8v7Dabv`3G_VLD8w4Ra;A&WMDFK zXotXcf!WUJT?xbB&}$Yy>wTXO%#o-dRI-^<=L5QrE_bo#JH;)_+yIn{ie5FiAc<2V z?C7p$8k@)}u&pP!w#=jmy8aCo+u^K^FTBo*Oq?DP07207&3sS_O znX=1uk)R`YOS5WvUC)%d8v|7^E)0i>$F*`B`2QLc_+Pet9!WZRK*z8m!D}l@7oXzZ z$efmW6;0CIPe}n$ z-6;gHWShKlmwfdP^p&`w6n7x8GVe|EREm(*8KA;0OTbcM38z7wMgb#>hPLjfJ(U2Q zEb?VWU(bm>8E|cuu?AOkxp1qDjc5;r6hiLEEjr?s?zAllnZW=vZG)~rZh<%KD&x8? 
zX&0wxG-Wg+A%n6xxMd*lWssiy>jbc)BKVq@-HBms{*&?uB{r<5vqSSn)hEmUtqQvK z6ZuX1q+eMr+|_4m21W=-*64zA(CK27NmULawJ`ft38-q`P@=EgrT6l}F)PjEb$@EJ z8~{)wm`A7a8alt;a6>-5lb+ZD3SxLr}g!Q@7LxmEAV(RmNZ_ry$Ty@kp1#}i`PXPP9 z=JP411}4FD)JNS4&{bHJZgP1PvJ8Y&uPG|ytYeE(Hvy2YLWfmbD)@X5m^bZHd4TnR zs{gir`*l;*S@!FVXhz))2piz9PL3DSU^S2CUMq4(YDX?0Wzd{f1+tu_jp^0zoTRU4 zI{`)t?l3ixOW}IC&8G;Dg{x0S{XP|#CelZK-p09wZ}Je)cV*qORDNI=c;~*QP;B|m z`{wfe7#u0mt}V*+==gd*m1QPjSjcE-1>ky#YJO=~%P`mn;Opd`D6Nr;XLwO1 zfFa)UKf*t9CO(%uGr*K3GSUz-D$*U9N@5u@Gf5_#6^13jSn`xg5D$g*#tmnt!gcr{ zat4VX^J=JZ&p?QFEvXdui2mMN!qtkS!Y- zV!E?H7g&=;)p+xGAG`v<7uy35z-&)9!ifYaBzkaY4y3DEAAt?Klpnx^f8^hBe}I*? zD3jf}9Y%!%jIQFLXmpvef?KWMS%MXe8bX0ZtZZlcR+t|~ZUEe|nRAxQ$&~6xD1rHr zrC4=~*L<2kz+d7A_D}i>{dLhrlYa?Y`wOC|E3W8B>d}>=daoWrj`WGwhc`6u0! zADJl@+%kb>+`J4BQ!Zv>C?QRTPIBOi=qUd!pc*kWECl+{_CpkcuBzZb*8>?QP#gqQ z=?7H>Z8$818-jAnNqlmD10N|PH|$5YO3RcoxTSb`Lb1&asa>8i%N(5t}{MB*Zt zSan^>tiL9y9301@u#U$$cEe*XHLea)wg40?rT|tkz$B0UqV~Bw>na5W4^-uCHG0(Q%jEKLNN;FKRIHWAx7>q(HV3qf*vJ zFQSkeFwx;VCFY&XY~WUNb++j8-94iLwf?gBYy5uazf*}hmj3lx zEr2Srl{>8$F`|6|>X6p245|u~Dn#9~ANdk?Zi%<-3%a!~-7Y3@Uk1pNg0I{OgJ816 zCcFqTB#D%X1v#sf+FhMv`H%e&UzlFhImE-i9ivjaiFz6f$=Ond3S&2{=1JYk|B?R{ z`4_Ph8@T0{!s|s`V$uM~mr!a(12e>~6nYIL5x~>e>1b8!Ggs%>j|~O-wwd=Q71r3` z96(JhY9(dug~Y15=a1;Jzr!!+N_>bb?V})AD!w8*Hn>y+Tw$<`u-aDTWJ|SvKG?rx zDH!j0kVL5{pQ+rObykV%oz76Tc!KBTqP(~O{D_zMkJexDx!@n=C0mFq5#2{gSLs(v z7!*Cr8?tFydT>ur$xponFZoB&z1tAUvf#Yua7!M#l ziS@jL(xTKfK}#XvfplfUKdB4r|0)DA{01yb5+U z>c_1L1F8m>nl5rF?*WX}`IaqAX%2Tb|AtADzjUlGYu3g{HC+qaTHecstOU67qTl4W&zgD%jNXws-;UDu4vGxWGGYQ=jA?EF`b|2)30D zzY`Z3sSRHlH9wa|Dy56s?eD%QIjdQ;iE_RWC_8}J1hfCL|I_5Gb6TJxjKS^+I9mHz z{|&gzpik-}{sXhjmE3U8aL#NX(JF7`frL=FxT8obrIf7hf@)=`Wmj`dZ7+XkFc0o| z{Lk|&=Z^#UliiICRHn@;^e*rt`zP_YY$>m_OZg3p^o?-1D?;!T#DJr-b|ws8u&i3A zOUarvVQTZJ!|VFD|6$JpdTdB@GAmGJrSazt4H`=DmK^v<4w?7^Liz!y_yhMs&0;mH z@7yj1jj1g0r;?>jq!G8m0b!==bhPb;{xe*(_VKjE69B%>_BZMoQC)FW-F$OgQZaG$ zm+1DiVv=T^8V&pdGms6Q81-yvcTw8mt<1m 
z5knHrO!ipV8Wv+Lk9kcPvez_)BUdNzb<)S7dVXayoHlG?ObfsUucuiIAp>wHF6txP zvLEHT%u<)!>PK|LSGW``m72Eb3gWViv4g5A84EIZQ{GJJIH&OhgD>kEemzz*Y_R97 zbo)ZRh@uFRFnLZX8$!bG45Ni`OFzZF)(o=f3dc7{$>1T9WM+U+5U!{OlLd%OB}*Ft zw^lzNWw(y;RFP@Cd?&y)u9rnUN=H9-!oy*u33%2_L1hvT6gXX_K|-SmUZKW zRbR$Zgp9p%W96UWZI~?(8sYzGptZ>WG%K&O@YqkCd>)%GS^GnW&e-AvAiAh7qca3T zsR{$UVLKz>HB@Ki3hfPJMv_WKEk)J37KCLitMdeBPE**b>etjT-V?D-Ps!Iv%wJx# zzez(W>FkPWFi@#9RpX3AKzFCYxD&VRhPL=gILM@dOm2|y)qc{LBHR&56PQSAmuD@Y zy##j@9`y%!V(`P#1vQ66Le+cSnu*cj5+i171gZ(Y+VkXJ(k(Nw6wgEV)bb=quVPeSk5gr4xHLu=d&Pn>45$6sMV~<2d0_80HxEX zS{3=uV{Y`!iXV8jDE%c-+X)wz_4dG|w^a12ABpQwrOcd3%P=@=UJW9+5SxVLLZlWHMngswgC~g*7J;ee;fzPGmtgoh&T$Bn<4-}A zEL`o3>TycDTI z?gdgok^y|tG9?E^MWsy-^YgH9BZ=<)o($l{l$_HsGkiI0VW+?4hAWR3Ry9>f+`y7< z@dx@#_>=L){7HAo(u@KYGE7*|mKMo6!V0^nGSeZv8#x+q*;&pji7XG?HLw@x`Gx%} zq@B0$Vk27lU3o~#sL}#uh(GXe>63QDKgtDo)Sx?O(y4@Hity@_5qwqRl1nX>5>-L> zrNMpoG1PG+maabNW8x*QqR&zbDYNEKcGRjwvh2RwNrJTGmVU@T@^6KMANW`9V%t@H zl8kgqC{m?8PgqDZYr6Dx&WzqnKO@mGE{TP0Kmw*wp-e-(5jDQl`nZA|Rmurn(-a8n z(XyTW_VXGPge3&s!_hikg44$&v-8aP-npQTK_Dym-)_J=Ea`B z>MC&^V{I6qTmF-}SoLkYl`g+}OjCxO%2-BF8397>T|}x%V7+=H(DqZ3zLIv}Re%9z z|KGLuvwq?LI20CTcgYlh=)SNj4s%Nj+_FE&uf)RogZ;O-({Er&0Jp*`Gd9a6Yebhu zLhn1p#f(nWOF>Y=(24}L#tOsgM#vL%Uk)Jz%I$9pR{Y` zEr)QW0;sN8%P?6~?u(#IDKaJN*?L8o9Sq6=5YKSp`TbDP%^bec-+C=`RL?`@kQHj8 zq6@ew-dIcbNBm%YYyYGz`d8w!0^m=s#ceVdeu$N-s>=sp+SV2q6)8s#;lK<6Nr8we7*wDVTu@?N zzcTl^QAn5Gl>tEix&K4QwZ+$1P#;xXDe$5ou5N?>uW(5pu1~_TO>)5y-||rGlJ=IV zs5S;ElAf8Jchyd2U=8!eD#J`eKxZ(f%bsXGPf?33u-Cpx8cg0ZwbburJhk!zM@sPj zfgI~QaTEWffVC8Vpud4n(h?+13NJG$cv_= zyk{n!8h~A$vJlS^pz90A8;vI|!mUEYi@8FV$3{DA)0)>Krlk1D0e!>``^mg`{RpP* z!VmC;^vHy52;xfCI28lNCm|slbryk%TY#-*v@p3#+s}Vs|Mu7av zl}ZwkvEjGy2eITXUC@u@scm3lYc1^!GP9~UI%gC;Z;&A`Eu_R^p)tekPWn44riJS) z$bNgG{w!OD+Ky)mFa3l)*BJn4zy@nF!3F=3F4#x;1&@p%2{6q8%dCvSO7T!q?z*`} zmXxD=2d4I?9hG7L03ZNKL_t)=*A$@hJDa2)uAk3eHv}r}@Ng_RVc_)%5+|g2Aes6B zFZg%nX6x%c^>KV7>Z$;PZNZd{JBV&i7prdz${N(!*53&9z_PQ=hRaT6DUa;W~3%sPa9{s+5<8+jLHyi 
z@E`cf_;OFgEiUl``+_oUD~)7&0#G-7rwR{8i~j0Wcgo1_E5r#&^8&6J!28fcdf88% zj)q71VnHVaFi-wnCoz3fAH^mAR(`m?b8lz?p~&Ss#{c%4*JQ5?2c5Y$s0* zz+YQChV<|Ke-23pujH{2orjyJz^ljycp|pemL>-8O=j|jeJLaPxt7N*_);JdFkUs2 zBqa3wmfSLD5R7pQ4o)O~p&t+>!^LaZtmY}Yvu_;ayOKH&Fr%V|tcJ9O8}Se9g08f` zU|aS<&9sL>pYRJ&qc*89GlhDh2vS&FF_5b$UBTB3SbMD;0I{$pF3s!GY-$|8tIWn8 z@4_8Wvnl3~1VGw`{$zf^T+4A^SRd^r-tg(a7G+kFYRS(a$Am2E-kF+((-XdIU=0)I zW7`v`*4bIi_ciJXkW(@~8(pX&!5Z3Ef4e0Bs{j#f3m?S~)|d7d@PQoumHD9?v_oJr zxv{Pc5h=5xKGS7nhG3+W?8*aA40tKwdi~8@JvM=QmsQ}knO9$la9>4YUHEEyj1JNi zu)$B-7xaVWx&NZOZ~;4u-OpRga#OtF*N}KsQ*EpQ7lLqQREJm}02|lSsSQBRETAW> z37qwp$r8^%0-`|Hqylvsrj3yRHhd>OhztCa>zlfPKa!=0blNQ_OU1kE_Pne*WI4|E zdCgo>tjh!Ej;aN;8n6%P(#!svDXH>%71--4m%c%-xgz}&QszW&M**ttH~OS!_%h{ykt9QfAttdh{`5NEiH}t*XfmYDo|gk zhKnw6#?^l&>+4$@`N&Jfdyr7H`=NyE8A^R(#yQsI6RH zb3nV43dypln8i}}5{e+8ReEb4!+7d=Ia<9MnLZn??nZz4)7Sb>^c`tUGodz#LY?+- ze1I?QfPIq7{39*VU^nEzU6YQON(;yYlWL%8!VO}oD-m|geIVxB`;#yG=?y%u>MXVY zOG$lz&v~#{;Zt+-gl^{->W0Gn1G|7Lb6;kO=+Rn?;&MfxW{ww%%xp%`=dR+(N$gbw zJN84<0+BQ5q4xhxU&IZJAdRC@CWf1_z~-d&Ye!if|;`0vlUek zHmLx3wRa?!*L92>gQ;IWe!sIOjQt<)Z!i7Q9Ep?Twvo@!o~h5<+eDvow1C2%}Ra#c+8=p>%)MqgJ0a>%)7SPl3GbnpxQllheq zw2$-+Y}A}GxVw;DIgp?K&Vi7IZ@0R zt|Ro!6#+i55AsL;rMP4Z_>w-LOF@Ui;i0~zGf3t(D7AoaShibCNRu5}3z^JXW>u?u z)=yL_--;kSUTGv)oQR9(NjT5)a4!ko;79fi-SUs(Teg9^)H}2(+XNHg>GZ5Ix-gQ6 zAdp-&oRu~5^008F!3Q5n-?M&u{7vuwV4~p+IlTXz=056mFtSS!6!`;p=?m*h-g$p6 zUxG=C7D|d6W<+F=jAmGqGBYYulrmd<$87SaW7Db30OW7`sV%HQTX@;eIU`4^V^fi_ z*7>2&T<4eIANrDh(wDG_PtD!~GAnOK*1gD3N;T>$;ihs>5Ega%UJdjqUk{~QG3YtJ z0(#c!WNP(cofTCcF}!m@IzAwy?#?S`qz(H+T&X*4LmRr4PTpFkAkrkO!JH*%$v|C^ zCYzM5&f6vdU-SMs0M1%?_|=+zp8Xzy$GHk>*59V`gg51D{SB+@I}>)p{s4caz9a`f zfD1QR&*;j4IS@)CBAdFJo7>c&zU-f?1FJLz`B_Ep|2e;B{p>3zd!NyX=i`eCN}RUH z?k_PFD{D8_jrd7@$+wWuP5UHF1WWzA41r?40E0{(V%;#3nbsFzo+}L+^^X++#KJ0v zQsbR`5*Z9J^JHsnaD8t3wDdIZcBcJN$1Y$Sv7-PrF6<4fi3L=IsmXqat=S1(|hNm17yT&X)Pit#i#I9w5<{)jcQUkmj+7xXF*WwL&|@ANt=} zno9yJavPB~P1Dc~KE(!7lu}tYDtFG+Ti;ZA+dP0Zd)PkotiMd+aig_B18Ryti+4oM 
z=+;C}MeW!5f&D;#CldKdyQn2biRh6bIy^;FCM-zgcCi5{T1!3=H^J3RF6M0NU;Ce9 zW$*p3gW)gB;qUgy?07zb4Wl2uVL!^h#Y_aaVNTrDTdoKpX-XwBX*GtGO2ZUoebvkn zRBew}+GAx;~e{`_t$r_RmHuQS~FPK`;S*DQ~xs{ zgBHq<)^}aE!u6wcA*skrTB;{{CyfFTN}Kc=HS3*C5(n1TQ+mNm3b9~aPnj6-SbHt_ zxY2C#qHZsH9qP0G1Z-xd1negMg#86iM1p^0hIAQ)O?Ya)-|D(iWRFR&87m^0&er2% zqSvdNhs46m|IFt97yDOHl@%loRz_RQQy38HlxKh+*eC4+`-XlbC}Y`9v3pk;eI+0$ zBn8#vJ1bM{!GFE&h6njP0c>#rh&dMfRki8G6pZ2waXh*F0n`;CPR_4ZvTFk$*k2wY zZt;T!>?NX%U41+IArK#+x_G0&TbBbVg_#Xh^I$rX8u-xLraIgs%JAq2S zV?cfGuNWqnkOADVPuLffu$_6+4MFLK69LiS8*#x>N+i=*GRUkK?kopn4xZZiSii^> zhg?3lM)Pe<&Cc6F3cq~l0TB<-Ko-?aBTqGYIr5L9oy^~Q7}87da4S)i8gl^Esd=LU4ZSAkh}J#+1l|2=>7x< zc5cC_Mv{ko;-j^RCz!Rni8>F~uIGsv9Mh>^*`KJcBHD`|I2ER~w4>Vgl}T6K7LZJX z)sPF+lpR2;Mm#l+>GhA2^>~=Chb{-m>aKi_bO1H3d7kTR#$!wx*zhin*L}R&!c7n9 zlSXRS4cQ8pCwrQ=DGf-O0b)vSnRQnQc<#)dToP{svYDmR4v6Bo8=N>XFGU23t*PrC zD$EoPMHj@nQ3DO4^#Q-&F8q}2Ds#~S3V5Afie9vJ)q<}Yjgl&)R+@SaUi-xY)&9Tj zr{^xc_^UFHI`qM@8or7H0C=3#(JJ;Sz@~+CV|{X)bi*#_)7cwMK`zG*IWlQzK3yq! z8BfOm8$eUpz!}Byg@a)J+xkyYkVIH#b;k!iFo;u-ELm^@8{E(b{(-*0Pu5oR!hHiv zzLA!=EK?Rep4p8~Z3&d+>%zZB!lVhZ6_~$TTU(xE?1z)M3y_E0-(?}r3Fd*m`0>X>%sl~Ug@Z|3pI=!Dob8epiR(lb!WZuB8GK&!4 zE=m`O&4Ehig6O50enXK#i?@o|=RW#l# z)`?*WZdf3HWLI5~nbkC47jw%TFl1;kxB5C9W7T75f+_EI__R(naX_2_P-iflR}yUw zjVsd%9I26!^+;+>Fv}Ntn)4FR`YQ%2g=!f5V3iyq6aim|`nn0Feo0s&ak*sxf@*kc z6;9Cy-~csX?Em?v+kSh&FCH3`3jFq=zj5gEZU;1Ki%AQy8Z~y(Soxx5&0q+3OA{qa zCzTv-8{jiv_JH<-Ne$`@xWx@8{Lgyw5JV6HLj9LGZBb{C?}HBbq1cDn*a<$%~ZuXzi0nXUZvwS&l`E||C7pVfKff{c_?); z#x^r#8_N_+xR5TZp^6uLE3dwKJ|%`oBUQh0%w~4}u((rU`R#uym2Bc<_TMHII24f0 z&zq65{qK#&92N5#xWOd`6lsCXWD+UMx}c3nIWmm&xs|MhAw2&= z#{r+r1ORxI61i*$YW|^*tKpDw@9Kqce&z%Qn;(4|r_~3zBU#D=5kkm_y77h%ZP~Ch zk%=U|rteUIC*XwzHKVO{ZagJ`+5ahLO^IXH|2nREwzuqm?EWo07#A^N0APY42KsxC&0cnerW3*Ecu?rx_;2w8Udvp$(r%)z zrwU<_S(7PIOkQnSRl?khQbN0vA<0%#I6%{_H0$Tv+f4KC+3JdCeY^`pv{2z_ zW2t~`s73*kdWm^{#<2{&TNH+UgciC2r@s?2{J&YY{m@h>cl zjIS3hjtYWf3YAy#7!#XAJk=*)QnRowCP!tGPhY-j zOvAAO0)4U7F!n7YL+cz`1;G|YG`cA?VXw5kS*zi5@%oE@tzQA&)DpU|C>Tu24X*ke 
z%2b+}7z&a$-HCYKe&OP&2A{Iav;V?mDtd9~FS7tnBhnMY$}jZ`@>eQ0&!T#S4Y0(B zB4I1F<;%KMk!i7DP-K`Du&M3{sf6@A6J`$3;qUdAiDQ84d;fb&YeVKR_jl9_dQ}ll zRqZj`8-6PnmauQ96MJJ>4`&W#4R0(+b)jU?s*##PDVpplLLM1qum7RW3J9xH89&yJ z4$P0oFFBJB4n;?cU#B2|nmn7}$s?0g+Ksfvp@^EujlP)ImX$s^71lapJ(kltZ5|Q9 ztUt$o&>cYSxn{s`n@z9syc8Q8Jr?t?7{=0pZK!6NmeY^E<&BhuqFUdR9*FKtgZIFy zss`6je~6UvwB`uq6k-|M%k0B?X;fgNs6?eIlxmE=wd zx1Q$GUrGTpGj8&jImI@oPiOztL96RnlOVMH*wk4$hUCqOI4?8P;gl`L1Ji)RpeiF7{m7*qg&jGO4y5I2g{lV9R zen6{x{-7lbW^CW2DUzftP78w6bF;~#c0I{%re#0I^^z2-5j z_fwqDZKB%niKu}&n>9!TF5H=flA;(0Lk)?nX)fuYdS+xDKYO^%s?v(D{Xf9x2xFdY zw8jzTUxy@phA+R)?`!ibh9w9hAfG;rC2X2y-Y^K&aWfLO6$nZ$SDSNYPtrVb>J?$G z>7eL~wDH>iINNDDX;}m53C#0yN267L9MIAr8t4xGTVCbgxYnKFGAI{qLl=;u(^HCs z+oDqF!4rJnW}~qe0hcsY^jUw|LxYJnA>pYiRqX+%M%6f4FGH$d-dq+Fwviv;!V2vz zEUum5b>AN>luo3)5`ZmU=v0g}o}bOxFixqY_rENMrvguaXWmaE;wPtn&a8lnhbO6k#>ZZ96!cG}I`=-x+^_6#;vMyDc`U{^b)hHld_EQg5Zb?%zeMC)c zo7Y)?Pe#>wSREXVmp|P=U8;FAmvDnO+>kEq^i|>xsiqehGHNoe(W4qZHNn-Mo$r1X zzV=^u$}%>tvfhl%{^!u?;z7?%E&e(S8q!6;U3b=j0R$Pcx&|tNPt@d6Uwt$LCl_R-LD*15mi&fOztur12Oe{zapADfsY0$Ds|tJ}ZaD4Fwq8 z`p6JlhA4z6L!tgmu|@_Y5->Yc^#CMf&$SY@J=a;=(TzFJ_QHn3!YSXKyb^%Mu>$i` zCwd(>H0v)S4v`GP#UyhhSff#r^iWU&O{aAi22~)K@I<~q!r6Z`-YwVJ|7;(P1MUv+ zJ`ro|egMe}{=ZY1^UBMFLP{Z*rIfM&fXsrZl5s%+m39GOcAHlHj7P9WNVpy>_OkzH zOVQiTIm0;LU&rHE;=3$-7Hy}pf$GzSU>mt5gI}pFcYx$pT>VD9hS`Cvfe^FBuMo)z z#@DkRbY6Dshp)c^-X-EL>2c6M-*u+5?R7@#@<>7n_MK={NE^J;qVI$9oPt7C_)5`1 zl(r*Z@Ag5JzpQN9tGqQG5@ z`eLLTBHT&KBm{;tiOjk6iGa*AyO0fVu6&cc3IssZ#sHD{Jl9& zcg{H{{TZGa89Idpy?&i%)A;5DZ{Qmihz(M<7CfT&jh$@P{7$bQlMwtDPdxv6c~Y(i z+E;{o?SHe6<6;DH{0@HOiRWPG^$KR^4;Rr1%`Ll-8w^=pQCOENC7i0svo$lMj)2XZ zdHu+-qOpH8%Iiv9t>0TJF=c?4l;GD9c#Gi9n&0Ol$9@ES#BX>=No|=cGp&TU-4upm zBtSx$*%LF%Ow=%F!Z6qLV~(i{8!#g4I=$M{DZiiZ#OWO&P-A#!BF(e@^Q-RiG$6?P z*ndrh6SCR}(Vfhdu6gS^1h8fVjpaY%i6OoAzt&&ZjdRW~0IQ1!j?HuoJhjPxh2Cef zR%hC^{x2Y^PaAMi$)k}w zF=L)Zbt^r`VZRym37gefm3LRBv{82u^{uWo`y`s5Hz6pdth9YS+pd0`PBj=y{m z%1;mz>8QfaD{Y7HDlQ!_b3|J&?D2S_*}o#AtujM{tUfS&pXD=?4W 
z1p39upFGh9n$}6EoZTicyc|HHn>HWp|2F!^mR=7sx)juvY)OR_=#gp?!)O^eH~Q8! z0lnb5X&|S6t?Ii3@Y?^551ehDt=1_D9eL9+b^TuaXe|0;4UuKzgNT|sYYSQPRdQL@ z#mcbAs@6A8&U+wxK z*k(c&NQXrhQ&S_%K*?lVb#*YDA=S(>cg+6vCn%Tt+J9Il=?C=0VN_=Q)@NhNcMq@e z$}e9j#`DMB!|z(xq>?|>l~vF}HTa-=eCnoSboDp71KY=Vp9OfpCvJtry?v?5N1d2oS_z7zlPoH0W>7 zIx1d=<$*gM-2_cMTfx9MA9IVpV7nUm0;ITf$Ao<>VQ*P>abV|Bv=WRl$|C3w;{j{N zB6@y;|6XaIPGDz}y=pTYYiVqeOPCk}pfl$xDkB79L%Z$13Ygn*UxwEEWuXpL7e-lMXio9ink(TY1I0$Oxn zuY~|{G9qrdH)oux?GTKO*oyunudPg{3l-Q0fpHPKnl<=p7n}ID9oY13IB%dEzJV)` zbR&A;oF4ed5Evy!JSMgR80`(n^PM9feL2+KV^)Huk{b#&*0N2^Th=w@gyScQ;Vq~jjt4CWR%h8_+HV|yEt{s6pZ=~;ygrJ7xH7KIeVE9ahTD75 zttm&AUIAG=cK&Ddyis5@{ZMcNJr1nGL@mnc#EKjKI`BeGS4Nq>q(2znLo7UdX)41O zHmEY$?3^AOoK|brCkAS_xsydPp~518E;*Temdj}--!cmfnN0+71gEOzowg)#bzM5t zgf0BcKk2T5^pG3BA1jD^%S?|_muSV6{jk$N~&KH(+t!F6EeoYA@qI497EgB-|5oW;T4uTf9 z*A2Zq#vTAKTwc}op7l95J<1p9#8C0C2HyGc(Pp%jna3FM8~Dn^EF)!}o*jTFn!B6i zv=#dfQ-)5fgpfI9^YZT27?Jo13o58*W&E=asSoe^X zSryo1nG6>IGl#G9VP!g1$@Hr$sQumh0fIY4i#+`_>htl2YFrpjHy&#+qF;|h(2iuN zcoAD%2thFk9*P!jE@K61t*jp2B|on`d)pc^Pl2WN8hKqE2U9!I z#5dgIO7$dp za<3T|t;n4hYF(Ji(fAih>03DxBWCak?oQ?1R~A>sUU1}D5@qc_<@QJU=||hYwh}!6 z03ZNKL_t&^6XLb=9xZpDF#ex`Nms^rezbULDa7^b+DliPV=D@=a{|3VFl zX(=KRjiA-u&{W1NtJ4wHc9O=Z9dfUCLQp=rKPb+XcuD|+KzzSacEw5D!k59dOgkiS z!TFRNChMaB@d+Rj@2Gh_wf9+mJbt2OGseZnnkM4L=$qLHbosjG0YY#UnAnS$1 zc6ve_PD-QanC@K(?`lL_LRa6sgf| zG=RczfDh|m|H&JOGt4`x%aM1E@oKL9CEJt`gSr#E2K_S^sAxkxJS+O!fU4-&1wQhB zJl3i$=ZnaPGmnQIAB^lSi<6&jX6|k-n7KP$wnAkipp>m}x`n|qj=Xwmf}xNmf!dMS zFQJE{H1v6#A}nAr@2}BX81%SlfbPNSHwtim=Ta; zh3Q`yf^_i;t?tZX>&6~UXNn>TtB(2#()%&3XEXg1M_cXzw)iCx+>RK0QVPd78=@#~ zqvn!j7N~6ld7*#0d^V zl69<|vX!iK7ptMM{Z@epUKn$)FqBrs|F%IY4QcEN414_YTO&<&)`jur4aRM95^mTf zk7lzdzGq9cqO!T__9QO^n)DX^KsKM0kS6`A8U}RQ_y#v0cHul`M)W0M=zKIXM%=`5 zI_$ET14@{&%FzHMi!+^Y&&ql&q2#|`s!diV97hen>nKBZjy`j7k2P?<{M?;^O+u9G zGzR>VqDT zKBE^-2t{C?3yQ83l-!$9b%!Q%xJbzP02L)6!5CuezI!erM z=wRc3YAuZ$kk$=cz!$c13Lc}_FTf+QCs%gFHJT~;XQhCMgUBKW*KDn~)8#8$hS4$m z=|k38U|MbTWY#t@^=taag5av 
z(;vykN6zPEOnEjL8y06EW8%hwM##X$M*t0h% z*?v-RWwGae^!pk|u;hM12?2k@{vlV~HgoZgW?Z>v`A&IDROQ?OM88?#tqg&QnEVt` z<1SAYZ0=(XbruGvgQh{;B(-08nb8T0HW-aS^d&0hZg1DcFf-h~7Pp+yBji{XfI6;0 ziBI)BkP+meN{&nt@`i=jEfZ!wI^Hq*>)*1}ukhm)t6L3jMECIC5zWwIxV}4Y*hPyz zdPRvbFZ2q=rmCYxxty79-3I~nNf#vE^lqn7Wi2K$`kf*){WG7w!A&jkj@y@=8wTze zQ4m+OS6L)7pI#->DgVg-F7jA%50sNG`bXLm4MRtd94B_30AfCRl{*tD=D~SM@=c5D zJ)Dc~!lL4e(Fprniz*bWQ4l4``_YD8;tf1bO+~uDd*m9%>GVoGWM&FKjyiBdmniXO z%oyN?QINaaWxho+2NVT*!?E!ofJ+Of8|+9$6)sNx0etZKe8$V}yrjsVziq{CKfD(l zcFA3FpfJS~LY~QJ>=obMiI`|QTr-|*wjXa?audsb+0D1{e`zwGugoPVWr!PV`)xGH zosi@?VA}(_ORhZ0>{LvY-r06X996zgn#|`sgeO^DM<^07P;k! zf)|qQo=ODuJK&_XX3{YiFB!iv9Y_Z*qVnaKM1!pYY*K=_ zpq-o~6wVoPnJp29Jph$h^kYP~LBAnfoY=f&4qok;Mgvgr4fvk(eK8Sh#}z4i&?}|br)}fOf~Mblw!ZDgO5*oeyDfx`47)GXuR%# zq6?bHgA1%mgoMighZJ+J$(4ilG6Ep8h#G6mk%JZ6M&>ck&z1}o%}=@iKtIRg=((2Z ziajQ7!oF(yPmrw;;F@?vhAVJj!8?a;K-1q-TtxJNk?KhwI$ZH%*E4BPn)yz5&VuZD zPdwSc4PM{|mTQxjOxSmfEe+s_Fsz|zJ-9a$RLmk?)89rj=wC>&nfVhD=r&voX@2n^ z&t&dg+9>Nq5%m@!w>wUhM6^ieh<;QXu_{>@2LV%le8`Uz#+*r7<;O8BtMa+;{6ppI zj{Ku2oZd@C{5m4XyY2)QDW~F)cZ?-)z)Z+KOG{$=eg%th(16#iJu3NE3454DANHlM zqieTU8#C_u$CWnGivo+gUoR004z4KUDcWWVL)19*!GASqXl0G% z^a-hxz&=5bs*UoL0ZL5YcB!R{MSl$Gi&;iaIo6ghNlL?CO%`=R&C86+c!e7Eb*bUB ztRMza>YS1+R}FINGEAt9J`8C;>E5-O2&zqC_CCms@R&(V3-U!s=Gp)(k}MGXOjEwH z<$g#&5V0WTp`>uFsz6>GlZQDtYfsZNO+Uu3Q0+}tKX=?pg{(z_kywl?xI>2|fOVsPkt#W(~W$EqY!S>q{%HMvA^m z*MSFzegh=z82`BvHWWTO4mN6PAyh}+&hnmGswe05@g&oUW};`$twq=iQnGH8t*qJx z)Op)zB53(<{s;YGP-0tW`$H4kFaH1_epF_AN>n)pI*cBr0Xng0^6j9T@gGLvSfLgAdn{i(V7c=Fnj4LA+jx3HpZFt9)1Sd64+=L?)E z`SHqi^0#qUV`xgrKkCk`P79!W`AVHjk%iYj=!5v_;ZPbuC{A%2Lb0-KD#krgi*%|~ zCqIH2rN=C+DuWIC)A}3V#FKIuc;Xz2DVkbNG>UonX-5XCVB|j+BS2MIp}quYRTHll z&dCbJl`--$?&*s0aI^8Pl{XS!c)^D$oW%K9j}HSBPrgn^4IFfrV7}ae#f(A#y0TT0 zDNDdm%pACxUNrVHyMSsoY$g_?d`!^l96lv!Sob8gA3>0$UV0Z}?~gL@9Ki1G_y zaceFp?&=R#05XG!I!Hwz4hjm0_;=9XyWd-I5`{1M;4|`_9?P<8{QSz=o&a*@*h#`; zWM(%0gz%Qp3^dkioLT8mZ z_igAIN`5i|Kptenpg%0rvNZhpzRtd<#cNA8bL)d_8Yko)wfivBLs`uG*_cEyN{K?@ao|1v*bN%!luUA?yAoz+> 
zX|T)0O%JjnKn`bks=R~#TBmX(aMLfig+3&vICf|1stx^o=`cQ?*oZ3;4md{A$GKVN zF+EXnk_4NKB-W`}qyq~RP=+cmSEx$|qm-W_-P2lXFCK3A^>}&T@M_m{UwVxKBJB_< z6wy5HQ4wVdP_4`}0#FR@gYxE~suCSF_MBSjR_W04Trr7EI9HcsJws38TQD$wrQva3 z0=9uWPiP~W9(Fry7D7Pes$LfJWq_t( z8tTApypSO~WYR7#pu8|RV2}zvANu=ybKp;17WCkM+jrzx3*hkop$>Qoyp3PYTng%W z)U*w2Vdp1{U`~T$)G)Y3CjE^>Qh(0~>ym$8SVZ=qNBBaGLeazSqwBy^BzhIEP3hOC zToty?rmNX~nBOG`WTa_5H8M{E3u$F?PZf8URJ||j*Q`#1WaVr>e^J)Z5+A_1nXZ(Z zd=h@_iUC0gB+u0zN2@w4BV-^ztUQUTbG9l>ecywTD%}RCnE3W#Q(|6^No@{<Ls6+r^&DqLhTaYzHsxSi`4?*VeDnP6Mi#Pbn=6u} zNalR&5Gt$GnwtN+g5MyrGR`2<2Ay03 z7Lm>Mu|W`VvFIPwtmJEns}m2TyNncM!|hJpjb-=H$^{c(0$Tn#|Q;7eKdgb*C6KQ5-^sj;mii`dA!FLB;Ju5EX|D2+?Uxd z=}*7Mo|jhqZ;TKIr+QRQuC|H?Kk0`5qVr1gw5 z#7LMK6c*1O{Xb>&NPF1R z|Km-&m{4C@GDiHU5I-yd!{Wcd&<0lK66MZqb>G_2ILj`ev`s<3mHv(I$~88wqi;&{_a5E%inB$gY4+I;YIo zEIa}f>_bpKncsM4df=C5o~qut(L1g;i<1G7tF>(BNZ=p;g5=d8E28q0?4lptf5ILW zQZ)>;U_R{#hoSeUmFXo^>7s+Cv&M}mW)nbULlZtmDQ{OYRDhcPRO}icRdkJ3nIlg_ z=x1j3_0W^%`3g#Ix)DutUsie?hhc4qSO)i;#}N~4GL!y0{sXn5u6_L!haGDC8BzU+ zcX|~~^ITD@w61K`aRg{n)_`JeG3@LdsHNinxwoX!6WIZR=T$I7jmiU{+NUS+lPU~S zU(csMX!*&>12V4>%V7iSq8d9GVo}QttK>&Te|>7@%1dpJ3U{FsBP;R(1T)J2l(GKp z^km5mW8p_qAWO+27Ze@3o^7m_h4Iowu>`_T#$iIgChYxP7HhpcQ1kQNND!+1s=Hu~cFz1s<6*{`H z8zPx$?=bCN?WlE1r+N`DEY_%ySddVmKyG;@GYU}rz`H3T^9_^N8Y*GWmA3Tsn>XxN zG)2Vt#n_Rsq>;ng6>qI(af~wxtrQ^3{*wqEqu!`$8aovBPmOgosn;NKD)t_612Ttr z-n_HqR+ubwT8U;Lol?>&RZQa^^hXg4K(@7)4^Y)GlAUe1rxAHA>N8aD=NvEgisE^+ z%CNs+wfD1=gEqrb@xM`6CjG6hA~~!*vJ^IKjs|`U&r5!0MzPn`E9KJ-?8Ptdqk+mT zTIE>xxKu0U$5Jy=(t~~>?SA}O9Eqp~k>2|`TJNVPF9Y!X!=bV`d;lnEX#_Ga~ zl|^kPYf3HuDN%KeBF;Ayo@=rrzU;i#8a@fn{97};cgKd!X&84rrvsp@cE-wwF~{>K zQOSQR{_k>CD*6XR6jZgDk7L*mANI0N)4#sN31xO{<(y@UmA=%q|Ij{I|DvDe68Ob%Vi z&-Enr$2Yt}gx-Za20jXTWkdRkDwprt)YRX&1EA$UX|=3f3nPxNuTVjt{?=(zfoGdW z+fa=xCWh4YuOhX%7vT6Jt0rGnhMc%h+yK}$JQN0s%gig|$EGtI4My6tp67d5KFmJE zX@Z;i#D@O1;?#>V+l5&4xE3HzVh2#YW7$yz&dgf!Dz`y6EoG&&EGBb&+1&LXrR4QW zt8_!r2A+HZQh}G`Z4krU++BeLjJR?}$Cwb(^h?+aX6NpUbFbO7m0n(S+N1TvJ3TP( 
zuHK9Zj;8gg_s6*~xC^BD3dej@Hx`-GSk;F|@;Hn#^|-(hzM%cVj}B~oyuO~hqtVjm zz}FK;9M-O>rIO{y~&ZF=+t>~z;U>)gL$bG?xQ`4S-@D!@g$84R%Q?Y-1 z2&hv@)u9+?N>x+Edg#W3{&N99+<~eyzqgG`6paf;ylu_{E41FuoK-J@Lt1=Hn!*$z zDTeVmZF#MGTP=h&O%{XyA^#!d%Dc=f=hN>Ty=3ni2FtQf|2nPDr=3-$AimDa+yHWv zh`D^V@s6&yZCJ#WlN_t`%}4`pL5FZu6H%%5T;qNG)4 zvnv9NXza>8tiyNHentasLW)yiovydc5o#|m#HP8;5|Sp*pS#n>(;kj3y%*C-nINv( za1p{K8#Bn1RD2vh>ZT>)DHE?JHvV&$eVL5@(JSa9^LVx-qn6QmvPvod7-Y^i4oQF2 zkxp;hSxYQ{Pm<G5(T3Rr9i-)UFCo`4OtzQ3(?b=uiyQBO0X&nN#6qlt5LoWpfByFUeca=Uf4aSz9fe7?V*6t6O= z9>we!@|(d)U29igqgozc>deYD*E`Qaq$7Dx;wNqM;UgXR`V&515B&wI7cm+X+g%jM zXNLn>%C(H2 z)nL>geo`s+ZQ{4WX8T*4Fr zjM%W@e6Reaz&w$j2whqvp=kYR)Vwdu5{2u<-| zq5hYE_1hfTW=L0_BQe5&+cFFKkRL}jO2+q!HKxBHX^-j)jun8=Bc{iYXAC}1m$sh= zQ40^8;6C1egD5x2N{1ffwM%2qwNC!$(WOD!5rg#g-hLBBFwI3Wyim?QKEMWuY?FlU# zIg*3SSvq>4fTBM!U=QTLiNn+;665RUzSwgK*J$&YUdn#@^IpX|^pE6E{}Gw9Xl9oD z(C>MBX`VssgPI!3``f*E*~iK4*ZZg4^dW(|{fbUMm{(hV4L7FMz~)*WZ@LVlvP zBcOJWRUxv_yMPZT)bvlOX0h~@G56!--X>?MV zh)`z%V3pVUtL-WOFWycjb1b%P>&tAR_#bWn1X%qiEC;$-{LkcVd|2n74RIq~>sIGK za~ygiOFf8rZ3I7r{PR6{D#i1!a&ULPybN9uJZol_So{wk1}26^1zVNHEOQjkD@@4U z)|2xum7LG}S1m$oD7!}1w7!q&X5v(G-9UWxNx?n|+5E6{P?dGShbPw{$jqMdNt9Ss@U zF_kjj(s!kJn+>&;9CT1EmGxyo_KCw@!aAA!iP!uoG*f%^q4Pf889$4Dw5`my6LBLf z6p6S_2lYLd>9_0@A<@RD8*@3R?N5Il&X}I{#123GA)eAtz>QG3E-zOV3Y+rZHVMu1 zp$oLLmTKUA`mkwYespI2XbSm?*2k{BK8HYcqnC`QcoF7-l?VL*SagPYjayL-cr|6A zn{+=7;?$AWaTK_o8ja_YU#gLN!7eGr%_z*zGL*E$=lJ0B`9yo9=Ek>=nJAx+PaT<( z-#q_ZJD$HHctdMvCkjZcYN^dzTv7ZlwjKJTrv`N+XJCjr_13QIC)4^#((hh*@n2*6 zLZrXr_H?x5$2k&S$QuP(GwZYq0w{$5y)Djse5SwKT)r?x1OLvMta|^aA?QCT&{{_U zqF*CzGqLhAoM>N1I<1&chO(QD{Z9jE6Kv0a{GjK>_fM6;^OfmQ8W`?Xlf!dIn9EyL zR8C5L&P1FE^dl^@9tPlP-zThlxq(lx-$YKgJZ=Qe4vHa24%2lJ{vV0o-h^w}%5JDBmX0PXI z1nL@0>d?Rwa|!*tH)R_4UePRFOv+2~4uicv`vzoK%s$DQnH7pW++z^|6#d?kKIpg6Fyl3KIdWV%7rJQYv=#I5 zqv4vSW^Pm49F@Mzd};k6(rE>v*P$>ziM<6Hu1f9OViA2n@+fGF{bwNZ1Ufa+#^W*tTtB(y2Kt{k}ybLgqRCT96 z@-sX>AQv)&_uhzpj-Tl1*I&xqZ=05=J@jk6%5!=IET!LQw4$Gb3@?=lm3DwO`{k!U 
zds2nxb3R|6q1DVQ_Qp-@uW=EnfIM3p03ydP(YS{EXK#en=hdN49!ohXetz}K^v_Mi z*s1x4_VI@rZ^k<|rNv}K1MUD2W;BbDElEE(W-0{WaKl!k^ir14*73e3g zRd6`zcyv7=8~GF_h(*7Z{B&V^CaEfeQE|*iGvz~g>KhODUcZ|7>}7vi7h1rs!~c+~ zaJ@v_gmd`c@}FWmeFkjpL18%@VygdkhDnqlppKf+oHeK7*+!X z4OozIy2+mx!Y2o9AfRdMF}#8e;+y8rVg~b~b(kILhxL~>|JR@&A^&VU001BWNklrs(omB zd#~5h92K#vgBq6Fq7+#}kedEz^sXk|BTL}G7I+G?eekgTYrmn1T+d>Rp3mc_Tls+e z$G(H4)Yb~LSz||w(jVLDW;KMp;R4Pf$u}NqS(>kNJi%e-FmTT|C;x+fAcJ_rWpPD) za=8rbRz*GG{rl1!9fOZmn+Z<el{!qr_>!dtqbT8D z5Bj@tj8|KqrZ}iC-W{oGy8D|jh{NFWL=$>G=dd+XhhMRnl^U#Jyu3gBf_-QAe@oB6 zK|z0Sz%f02kIl>a+aMd`bv;=;4d_d5SW1iMpY-ZDmV>Li&uLgeTRW{MplGQI=8I2) zIX0qy_Ul$UY0oXt^G{wbjU#r(b8AAFaKV&w6k4pGIJ9?!RsL0p4IAQ3<+SH6@Z;ZS z1T%5a1U?->9)jzYbj(+YvA6sLJ*#N2u@LgX(ga2?vo#$Ue!q&j1PxN{1JLQ|$$BAd zABJ|o7}C-kfhkeLNw+=H@5pN(Fgf4bG^6v$|CXQDT#OW4t1yE83Ot_tpK)ZTP5U@bvL%6S76$`PbN2 z0r-O5L}n6N2hIjCJuOFwqtAs*UB&bDCjOPh`kdvfr*0V2MS;{}l0c z5a^-bhKNjz^J055w%sj1Xx12EB80W&!+P?^dAKJ0z)q?{!zQ9{&q^%#)@np&FRT@{nb6T;3NP-e%6b~7yU>t#*pMMQK?N6BeY%to3(s)BFa_rYPJ|3aJ5Bq~o`^?N+ykV@U7m^O_`pLky zyzxVd@Wjwl`1{F(JNKYklm3lI4d4YoOYYxN^72lvCwThaX1YC}&&Oo_l>WH^7*+`f zN5eL{2Joam#Jz*Zxhro{F$Cqc1bi%E!kifyybw^^)-{xNzM*s&JRS>A`cvEoGvBCr z$_Jhw`zb-oS{{x=&tpX=q=Iu{pf5ytypGGk*S+A(8L)qgc}3oqX=ArXd2ok+qD{Vn`c z&fC^>963=o=9_;s_f57v*EQEWACuv2Re${L1bM}O8;A{0>;J+N2Qs%hd}8Mx|IOvB zCxw$X4D8&^S5{A2m+jaew(%Tg$Z6}X3ri!WJiQ(ZoZ^k=M!c5LtH5&?W-eWWU_9N! 
z8vHlF7Yw+VDG0nmt*2o+bW9gq3gVYl_% zj^m-{657hI53+w;V9U%JoS#xOIlRY<@rE`){We@J|FC0*WDjH1^#1@LUgqYt5qhoy zgZ~(>yl|+Ez!4wb>!AWielXpczaJe z2Ty;(uH`A4$K&$EXmO_f{r$f+Up^&!ef(1`WS!5%AMh{upVTYO4}AX{{7?9w*#E$8`oEwHzMx;Y7i75#2ULkZ$LY{+`m-EReOCsa zVM~*KJn6^a^EF~pdy12ReL2pB{hzr1g8l>Fe_@C4{005O{R@7aOb$p=h}aX^&TS@gIPJ4%*+}|NAHX(;9w_Q5lV0`oZ!$1PuNc_P=5O2kzkfpV)t< z3MAWqVK4XvU68@Aq*)29bv@u-H##9_F_`l))ckD1hf5HF4{U`P>=nvcr{s*=}AxI0laI@r} z(8A4R#XZb!ejCFUMF$VOuCOdJ}+8Rcwz{!0AW{> zcOgNC2&@V}Bl8>D8+YogWHidw^Z~B6{%tgerm;gRvuZnNfQ(*1fYS?*tL&YXwg9F*8VOV8DiB z(+>1@F#U~J2M8|k!hL}RuaomtDU}E3*AScf9PvSX{{)FOF8*TNQ}_suU)j8AX!M}D zmlOCp?wyWN_qHHG3$|d)RzMoOu!3REfI~>T6YakoLSUeTMR0N$PqF_n zK95!OTH1Z)0FUhJwFO(ygKs(|F!9}dfd4@KxO zwAtQo9+)G7hG@}u!mse~r3(fSdl0V?D6p-68Bgybf2qLmzULQq#raf~PGE!ytrD+{ zu<`;)DNi2?kb~|ZV1j{Hw0;*%VCiij%~deLl2EVHfnHWXgKh$jwNg*hbjE}w;mXy_ z@RPL-9OVR7>}L65jJn%`zJUeZu-jwqbT@Aq(_OU>fPt&<{(=nK(q=Vkcqnb|`kH(% zHwFv(Te5A4kqvojS${^RbZO;{CvsrJ=rZ9t_qIe%;2sk1rM>Nj8@M&`8MRpOEd3VS z?&lVO50m*+SB*_!)uSeYX2vlwUL70snC!8;bZo#c!+%@3;pav~rhYeYwn7Y6*000~ zSeC#I3sfRFth}(bNNQ1;4A)@$_lba0qL@!e*bQxLb1a4GUWU6E-t8Yp5z6}p_}I;V z0?ZR@g@IOBgW$iH&$Z$|dle0RUOr%^vtKZxKEWLj%wck0vVacZ7^)}*vlFyg?yrA0@`!%u|(<^8!O0d|C@3?%!DO^jtY-@I8BBhZ7z@=lu z!;P`DpsrF3I>;QbnJNok@ho7_L)xu~+|MPvoJ7Cb0rS9Ogwp}aEHS8GHQjQZ&Mq{7 zTLa9MEd`D_4f>g`s!b04ij0bMY|s=GJXbPJPfSS+v+tlwZp-C)g~(PV?~F-3%e%IA z1@LT+H@W?!@B%F*K9G8BfB@QS5Mb-Tq`9@a-LldF$Pt|gzF~kXAh}#FlQC00@#LYO z$Wmdmd$-+TqUoT&*xh~$kN?d1E=P{jPh)Vqh5q$CB00fIbs^YIzeVM(d0b*yb}IxI z!w^(~YKAX%fPZ=~!m*eMAGF_U2S7~Lr!*6Iyz(P_>;D9L(mMs-pdiNr?zr36bQ5+x zYG^*R_XLi1at=Bo#CE-2c~@omutx^!tYhjhN_f`zEoKV1ig>7NKf{3Dwr&^VI~6A( z7Ajx5lK-gXgt_#g8odi3Rd%N$fDpZ8SYFrM^dGJcvod=^n)ud*kUCEz4b?WfuwmG{ zdT%(eXLsDGN}#B;2{ToaYitoJ*uz^t^}G@OEZXc&0{78*As6l%;_HP1q70hJBB; zShrM=*o7oI;pVjk?Y$=b4d6IV+(T02zg8t zjeD#{*ef3Ff8akKrboH0F+3J3_+umG05InhJPpxtW*Sd4tH-^NxUd2y_zk(VH@?9? 
z-CTF>$-b*0O6_<96IJWcM#WqW`~jtj>7oTcK{Q}$1H@=UfRv)F0^z7JaL%hH~B660VwAG8h6C)*tUiA$6o0p4ZDO!)#(NTJ(v$E z8WD`~^7T^S^yf`w6Yb-4pTk@M-s7*#u{XG#4!GMbxkI44IS5y}dWn@iEa1ZdoGajE z1uPveSWVEg_S1kdC(M^9%n1`H>tlo6?b}kSr10VuE6|M;wquLR;r0gq1_Xy8X6=Ry z-OSQTCQnd$OCW(ytF9IH$y<(7nB+gG@M99?_*r{&cEs{}A#=ELgTng~?!g?%U@if2 z!LT%1jXY0@}64nDasbf5#KYc9$ZS zcNaZ$I4&1d!vWRxC_&RkMv$tUiPH41XslJyfBwxNQYGP2dz+E2llNC;CY0P>o5 zxV<)@o*y;nR}kJQCS}7S4K2vsqp75qI+Czx*cZn8t;t6f1X9iu&scwqCHi( z>^R|Q_}+p5#cZU!NBlVHM@(c28X*UF{PW;7g>V4y5^utl*Rw8SS44cHeE>o;%*q|hB^0%a(T^#r*uy!qOT z`qI2^7?OM^XtJVqQ6GD=B9@KgsC6<-6T& z!w@SRH#I>c`LKaidP~5G@3_et2s0k~h`3TsaGA@6Oa3ic_y7mZZ!DAi?7IOL1%O@F zzecyO?!po*u2wv==2dY}=UeYmfhk2VH6|!IG7*6dQ-m)fJa3|R!B&7D=EY2*H!IpH z63-*xPoRegTtaWV1#2|`I#-Gg`cM87!{~E7Y592u$Ar*j49NxCdv7ypL*{VByj>?l z1rYJKq$~&JWxX(_eF0X)E*SI|{0Cr^{SW~lW#a)uy_PUtUYnHgLIH;ykaWZ3a0@4M zBQAssC4j*NIDE&>t&lp2c^b+SH!NYTbnb0)thG3)@w#b>d8OrfZ!>uhnR)3#Esf0# z2ev3E;F)03jvx|7NBX^hbi$(F#SWcrcM?}*p^N`Seu<_0{2T!w6M`u&_ZUA^YlsRe zEr*%b)z7x&$eXMscZY$xVTys+=Z4ml6_0o&E_RTdDvBf3vve$eCx1=^kk__$GX^3n zGN3{4fe%Zxqk@6F_H3c9(1!kt_;(4)6c8Nl)H3MaFuJcye3<;sgq32N{8rx9q69m= zkRUEY;4uB^%yt&dj}DZG7)KhtMM`5iwS*`?O3-&Sh#3AB{>4Q;AGeC9Yif0;h@3{2P4SqY8 z>qe-!jQ*YCD=M#UU(}sf_zHlcB^juUt(DS7Qv#|yV~iNK4QTV(L5h58?^J<3=&!YK zK>%)PFCgk+GI+vgcW4n_*Gnm4vzGi2XfaZ%aX@p8kax8kLk99j&$MLA_5;s_p0p3{ zq{M^~o+&K3w7^Zebc2%bC?h#{0PF>K+m`pYT)N>^v`lx+3gn9C>^=^h2nMAQChbQP z$8cs=Y4K@xQu3!U<@r_M1Jx80LCAm}=DvZv4=pa)b8#(m?NzxU%HUgwUJxs>*~#sz zaE_G^Gy7cxS)$IFe$e~3^AA|-H*YtLy~859T-e4hKw8k>s5|PFcp@icA6Z&;47O$#gBu2z6Bv4(M{DfqvRe3RwQ0jT$>F< z3d@DtMd$5+wy#iarRYaSs1^@}oVfsIH>n9u@`S*F1TqDrlK$L_Oy2i(Fd5#fga@%d zy^7h1d(dd+s37tdx3~puMICYrQbh(HN%gYzVmWaL3R|J?684?s50NGoJsj({KB zc4dr|RhiirS2dlETAHeWb|vtfS)g759&UgIfa$RRhW^mLos_=azQVi70lIUcQdTa8 z7fKSnv>2K9Ux9lB+Cg>J(L8*oEyLNxkj4ZIF91}DhZFQSNb&%tg3}!GjIk)!H`cF; zDP4$|MvDPqk)22e0NOzg0~e&tNtjfdQi()+)d1oznFeSLjpbqVamCTZNFv}~mRz=O z$2Zn5zu|Ooi*AvFW8R-5ZJ5RF?G7nwk8Co)82=U(qy-cDv`hzI{qrUNdVqx%_qhjW 
zErJFQ|4aObLrZe=$a{u6UJLs6JcJ>KLofvJcSTT~csQJxV1^~w!eXA&y1Jd9TZ8a^ z=ny>cGVdCPr{~WovJmZwxnXkCj?xP~tf!lFI|AUu4rI_qcsOEO@WYD!U6lZ(_3RWD z4;Vff-1yi+sJ$8=HH`K15+dAGc?inkpl_3VBun59(WSMRTP0VW!2j0X#%vCCPnTFqvpu>MBF;DHt_znG zg@P>n<^W}tGLO@AKhlqb=Wr6E904%t zE@xQgIO3SbJ*!=-}gD3;`(vJ1~3BaM4fEmTGEtB8LZ%+;v zVvd}b7&*q`MwZoBttcIeMLNA-FeQX5*_s}r1j(tH|Pd$x9yS#OyKSM z6)4~3Al)3ww7Pa^uLNL4b*Um&5{@qvtpb_Vu+IKhOPTmV|739e*1`SZ0|L#oK5W>6 z0>y0!j$81Mo;dWdcUY^1)YJM#QcK!>ge`T-3>WhLZVNPB`RMGT!M~zEac*8{?8B=S zcH@mK;s_AwQNeTE4tLyG+B>KZvVDgqh6S1qrHadD{O;TYpd29+WLs4uiHG*ui~1t^ zxq^PNRks~;gt{q8(S!uu5vFXqOm6V)pjRK0w4}XZm*3^^6VXbWT48}18UO2sSvFHP zCQSqi*t?BGA8Y6cazF8g&Z0sGVJW>5ct_rL@7rw)aJr%uj$3Bh5!0}|vEwq_6{ZC^ z2nPnxGY>c_ifhq<{Q(qW+53%2wtsv@>?mp$$@t+yXYRmC^Zjx>L?ATY)YM#LCUJ zyZ#YA0EncVFCK%Z^tLq~rUpb3dM9k9l~0@1nW-{7C%-{N2H5as6X6t?gs zR67<0)Aq7s$l90c*7eXv@%6Z57^dn5Y&!jiE}%J zb?+SsvoBW^!z_E`!V2%O%Z*1MeW<6BC7R6yDLy zGK6hxFD*gevU{66dYSBYURa(T>Cs5wgqbejcF`h<5c!K}rU-Mr2ot_4^$+42DXidf zc5w`rtDpl=iL#a+P6UVUUN1K+_%{IX+r=F?(&gB)%fod^vaDohw`aKPfJTf%g1+- zP9R7gND(fO)+hb*=hc;rkddFx0qEnY05V|uuH8By_`E&Ji4wTUTfChP`j)%09wt^m zwms?(=ssXdo2VZ05Hds6@PJi?#H`KmWPx5z;K~0=9cQ0-hhH;FKK?-5z>P>b^UjR) zcTtoD7-c;`$WR6Zp$v|x>`H(|A4di5SNBJR2w6JdVea{p{2;&VIDl79LJcG0(`bLg z{=k;Bvysx}OYTugO`$?%)OVl*<+@SyM=z6A8~W;F0bN+-g@I5 zxfk4Y0~|Y#gJggmemTEy}cvZw)M400g}&GuX77y+xM0w@E_X1>lYAR1(& z(NUQvwy>&4w2GwsYXan^sAvRA@)bK^D_ZScjm-f<7w{d4-u`UKa_09}VC>F;GCOKk zQMwLGRahP+h%XVEsHePq9remxunpJdx7jZ*>>v56%tCCr3Aac61VXkjA=|had7-h;Y#0HcD001BWNklsQ(_NMSSx*N%i%CzK<}0yv^~*;}eLk(di=zPc?Sl3;lW(OD zvs43w(wM`S09^3zR5Ah45VSVC~uy(5rXcPk)JxmZ?4EY_9Xjx~9$kT3yGG*RT zegl*lu-&tEij9hAByl?z;$#44K$yQz6l3CuhO&Dw9T+8Nq(HBt`QZR+36sxy2lO1C zNgnQoP5fL7o;lK;<38Zd=t_rs^k2EHl|2z9c2R^prIY*ku&eu_BaXK!tR|!@2k@iU zdaf-*VIMOZ>27N??|@&n9p4)pytgDoLGSuPv@2vaGWUDQ(?}7UT5`1p2i=N5Q#r&Na} zG62HPy0$Cx&UIi6fSiPIMT^hM@Z~aCeCs?Q`TxWAgB3EGq>n8q;^VO-?-PZ01nX|T zjlNZByGUmu>q%4^ws6DD^A5;nfI0_I&*MqrhG9d0bO65$iKz^jMkN#3QUaJmnWM2&mV*j`Sm^9ZO 
zgvAUy4hY&3sx2}F$Z4Jd^H!WeUDf%&v;{$5k%P-UtOnjy3AoY{0>3(7WSvbNK8zpp zg+G@tWGE^YeRh1O1*?fd+0LZf31Pe30GNE=z!EP|+jn%!tnGp#Y30ZR)T%OD%jsoU z#Klv@YwM3v<`=_X+wpjAVNjppd}2n$NI~${CH)--PJ>P;+x(-UDH;}m=)tH{2MNHk z(h4SRAVeW@3m=;p&5aJocqgb9UR|;AovU5`b$1xM{cc z-_Cyuo<-Em0);P&?(66RvWO_JKtfx^5rMqQ+^=zs$qXWY0uWPYf7}kQ9E+-e(a`{4 z8y4McDiONBqKrXq$=#JIAC(N?mI4#SXaq(;AC|S|P#5|kDg(h00sMt8W?zJ45m z#wwxHMArrCF9Jm#rvj>2L5KQO9;k+&fOkWS?y(RWto~OGcvW zY*8C4VE;=AmOF=T#e7y?^nXVF>sPKk2HLH+JqLx=cc1ZP`X+9NFT%A!*MfGndnVA= z?SR!$hDN*ufG?}eCo=*@1zyJzeK>$l@c$@Ff)kcgyin1=a)8*BIW0O<<#cD{KD9Fo zgfGHfkP8b-h)y$VqfRQV&QYX`T_Xj}XZSUX-F ziqZt7x~TiP1;CfX7l7@t$0`3Ts}_$_V$SadCl8yiNs!|2-ye$hRKsxJWL{~W*Slyz6(Q8+p% z{GV|EA8&{~aFEGFub=}v4}WP#bDp>bZ*g}@9-EQq)j?D()CAFQ6hCE>} zn*^9;lKNjfUd(TlEp8#E3U3gSkpjah{Vn_3?JvnUw%|Y@(Eyn8Z_xTOjJpO? z;MoNcy#tM%yZQngOoUQkKS=ENt02GxT2&kd0##C^pt}wqDe+LuP@qSSLE-;Qa9jaE zgs-gmcWaOUUZdkMu);~f8HhLke-D8Ary_{d#L;wML7ii~0hiq}?Kn>o(f)Qs?@~-% z3wIKEs!XOAiKW6{I#ZzgH0d~pac+3w$CDrBr`}U;G+r?82wpkaMSweQ5l*?vheg*v zD}k84Pwuz?F8OAE0d(rvD?)_PNW&pcID`Q|Ie^E$L@3J@hujMxPo*cYVH>`kk%e$X zh;BPVNqPQl$VsHLE=pvPKc?Y&wsI`yocq8HnF30{%38w2a_C-^xsu9}lmF8GFY!zG61Q|~#c1MN{)HIv2+19b z+AhY23N%)Ge4x&@J8O_vEf7CAfFDI4Z%t?%flO)z%S_zz=tAKwlj>}Cwd>_9yDlkb zOt!>LKrwp(7jc8c6=UEW*EO0xyCys$n8s_r6a9x*OCOey2SSyX5V%4)lFu)TP~c!v zAWln~wgSyDrk4h!UT_dwxQLC*vZ0>cQO3gVJO;E)|4gUMwLF>cF@k|`uiy~?4A?o3 zPg%Z(aEs9w7kjp4i0ON}k(MYlR&_EmL-;b0{iNC-P7ukHmq=0giT|Zcuh*5r!xA2D zt&9I3WAD1&=x!?wR)LgP_RRg?={GxVi$u*29LQJhWJb?QN0wy?1aUsPgA05+98pEH zG4YlRoSO~|dDt0Y63C&>Ero20Z*^g+%E>x}Tf~F^tN`@?^}JO24yn?%Gqy`2eljze z<$Mmh?*$ur%cES?y5KYJOUm?7Hk?OoF$Gf=q|D2MPba`WJs3TIyl>&-d0~jSD+pvn z)H20mxk22{h^4zM;&L9v7$=P}b1(q`7(5jEPP+%O8Lr%4JAFt~|B80+7W)y4O!p+j zmd)$iAjYMz>1nb*STDCBO^p>wSafNL zgj}|@0GsaHj1Zj+lQ28|fvjulMSQHf)64b`On=>Bht|OlqEQsjsNTbbe=8EdBCwq# zHuw$uqy6PJ#V_FPc*{(h@*zh0%k_Q(FWB-uVT+oI$dC~XBS<89xs9e8Wzjn9^1%Ko zG5;g?Uhh$~$3_t+Vz|hH$=##HTw3DGO>(Enw{ui*6r_(RG+lZ?+3Su0%$p36v3Grn4f0Z|SV!}io95J9>-G1kcQWPi~l@)Q47)I+-ke+#sna)EPjxyO|3jOW&rf=sDZMYAO 
z;Q;m^w&&C)_w0BvjO_rNob;S@CseL?0C3pIiMWmWxQ*GQ({=GTe7vMBD8Vt(TmPAj<4YMr9D-{& z_8WuRTS47!nOd;uEGGth6|~nl-%++3`Xc^w623|2Dwp|-49e%WG!)`->#aIhf&v2y#-e ztt8>d8HJY4RBEuAd&Z6u{}fCioWduIipkk!paogCkNnjGz%Oh1<5KQZ)GIB57BJW{xIK-$Do6+z&Ln$`jQRfN)j{nerQB%8YWS)i|C<|O^hKS^*N!~f{82i-&@nwGV3 za;K47!eamydtsKj4(!F}1=u6x)YrRq9zOwRj$u ztVJHV7@+Wf$fV%x9MZ+HSy@COu%OfL!>n0K)z*{vC|l4=F6amNxA+D7$NA1-`u>*1 zfRPE?@EGc7+^(EcmMIrSn7tgD7a0`(y%UUnLqX8s z7Iw_h4CN5LguvFaGP*rx`8h!eL%tJbxqj6r3+dDbqEbpQnA9p6W_J{>;QuSCTkikOarrRs!pM#=L)7tf zN-~F*x*XX@;#pY}158(r!;-)u)!vm9aplCP6t!nFyn&cGs!3UnQ#MaNPM-Sia8NrS z-6InpLeS-08vlj{{Eyo|u%9fVcZV%#WG(&Jo)r&KiktoC7!hAGywbj!#tH_dV;FINx!6Q`ZH}UlU zvpvf&F*B(y{eY&ZBEtgT&L7y6JEv*AH-KT%!XU)xVUGNJCh!*7>)o;<+YD^UcGEubcJ{Vx*5A6ST{s;q{0%Qw%Z%63hjB`f$%-Gr?ePu>C zHUu&zk=YfC!&O9(0_gG^U7Vq!PEhx#2lY!ie>dcQ<{m(STQuMe{s7G#pys zx4St}RaP3H&y*!X1z2epB!eFJx(kgMw869wucgZPDfScfqdi40-$%g@vI^dZDyJ7zCN#NE8zjZ?# zr(8(~w_~|EJR&0=t;@OJMv9fZqn<%uJiE!v0xD&WyjCIDdRm^B=w696#=Z7_Jv4v?I^5d=Bj`BRRBOgK!5Kl>^~Z zMw&Kcy#Z7?0iDMKK)kL#%}_$|EQ(+|&3|FOV-bWh2$a1cv3eNG@huIEe-Z!dJ~c)w z8PTxJ72eOr3tF2NN{hmlW|m!s1zNdNRu=|v;12>)(}Pf0Z5vM6X@;uV(5-#ZC(6*W zIj~(@{Dyr&|MoE<6ATOBk1!NZy$^iG34;* zdJbcd7IlAUtEa1&YP)bMU`u{$QIb7OES7xA(_2-S=uvXLxt!n<4f-SdNBjT4Ov7AL zE|TKFz~11<>=ChLkPi)UJ4Wa-d)43tQ+6_U{Rwc0>h-X1z}uV0;b;K3wkMq@MEb$e zUt$Ki+vfg;y?y^U-xz=7fc>Kl*bBlYF{o`ETTWp6Hs-*`AVqr(`Y!%e#;#;_ww$zjk zunMNxReu$zy+c6!_58s3bPN9Oie+CaM6VS|F3K%Ru!8fqd<7SYt|)X{fuzWcDV$cf z45>!gC(~d(&o0f-so7GKAvWzoKajpb)~B6KTNj;`7}`R}j%_CBjn|Lf->)=5HLWB8e>EM*7$P$>xH-bk2DdnAlp9PqydsIX_CHT z8eEjtaBC0X%w8lihe6gRyb^!J5kQI3&I0hro(O6=a_9Al#j)ACdZSF(0^YDcZvUdT z{Ex};nnBZno zY+=tU2nvk}N!qkB8d&8_EaA>8F@OrZ)&C*flQiFW*dmX??8vDLQC9v^7#sdwbH#rg zFJQ?g@DpiMn)%uogBvj09z|pFvEeg9f1y;O0HZxHX2dkH8W9NHl#;TuT;kwQR$RIc z^4p<2OrI@BOZLs&?)L_MWG4H@8X9l;fLl((ciY$;!(%_v$g#Llo++pG6!FW6MB;zq zD~ECx2RLG)J*?(Djr0~n?&<XQE$I*aOSW7aUqAYp7zVrq*I0<*+;eM# zT0RCGZlvggY3DA6!e`OCRS_%8=&TS5%6zWU9#)#wZ$fYQJtsNEF3_VSVqi5v7vHcy znE#Tt@W*354qqJ-9}$TUES@h|Ay$z8bGYV4s8 
zLOhkYOhw-ML0Ouut+9b`VD0z!oG-_shUn79Ah3YI=Jq}2@Z4qSIhivfiAsAX#)vf= ziwrCJxje9>|4Reh549b71ybs5(NuMGMg~JBvJln(kLGlJ zkw1WG}XysC>4RQHb(aD-id~gZwAC^UTG4m2nm`C)!#{8Pnss zS+-rAXh2Sm7%&`>t#8*K%zqpMc*%tq@g~MU;#)9s2{TUN1ojI+7pK#R3SvqXh-?16 z=^v#Hl1B@}0e|W8gfm4mE+@F|Dgs$Ems3&y)_yss#t>$h$l^E0KT_a-K~vl>!6&p{ z1T-Rh9TOzWvW%G!j(M=_Lhk8lHMB79t^`lgfJW!?l!bFsN6JZE@xCk+!^4~M0p1@ znkN0Cp*BEJ=r@#&Q$ngVZkYX-vKA0kq|AvZ_HXw&hYN4PUUT?@F!0)1BpD$rZ5Zh> zuP>CmIkVX;IW}y2W2%8b(nODxt>c9z+gGg|3jS7d{WhJCJH$jrl#}+qW0>@X`6V~u zn{#Mc7@g9l=F*U;J+n$JxyGy)NpqVp2{(linLG|$`nH6 zAtY?@e`O|o0WT+Q4Ds9je=s7cw+S0B`P;`}hH4I0BHN*KrFIm)dxw~z|6o5 zlHp8s5sm*?kHgClRXOO6*qZG{M-I?gDN{a(KkUaS$B3Foy@BOguqAB`gKv!$^BrOc z1hplL5;9?duf+d!Ukd8LlQr#Dz`&R1{rI@TS8PEqg z-}<4x+80v}ySJ5g7I-{+UNO@CTV@ zsalIs&jfMU?zW+=;nLP#7zQlIn`VN^&7xC(oiRB0li5j8efo*JWA2|3~)1oZ1mnTXKUfy{JWMXi)AlV;4%8 z1k22Z2ut&47!++8K@s?c|6-Q@|Aq}774AgLJ(jAN*IJDw{3DA>gvI&C-_z}9zJ|Zw zLz8&BPwUcFn!$s@q|#y{7f?_;J4!}9{Q;>MP*OA?}w;LbRJEN z^wHw?6M$=dWBjjc5I+KZeQ`*5OD~KS`Unn>b^us(Hn{_iF;hgS7VX{%fH3JqP89#H zHu2hA$^&JxBy{a_3SWX-YFcn;h>lkbko_}#I=+C3J#s#`FN`ng9eS65e934|A31C) z3n$|MHC;S5;)z)k5%lwwvJrrMB;_;QmMARDazjQu@5Cg^j60Tdv_@fo(sW3e3)=hV zhxT&)00VltHcjxiPs0j_Zh^!irtfVK;7Qs3E)UkS5?$>I3snYM*aWFX)G>hM=N$MZ zO9xW98TO7yR9m%ECVkv3`ICSY!Y)XOo3lOv@!}Gd9M8 z-mq`!g1y*^y;Sm}SM*``v zN5ut94$!(?C!W;BmSB}ITzi8H_D6GKzaxD|Ovex0xXt&#aJlDBOmi(qR2VzMf>VjQ zv|9ulnb2m#-w*Ma3+O>{t#>VHCg9J(TUPc&5ZI-|0LbA>^N0KGFwG%2u=rZOi0zy^ zS2ZxweBdBRcS^2dEZ5bFoI`qcCh1Fe9d5y|GU+xDiI*J--h!V7MgerGu8iC#yC4c3 zB>2L7Yi;=(^X+;;VGeF3D|_C2y?Aj}f6Ghr5yl@gm@-Wkb|fQA2% zB9YKYrY`(1!z2W`!H~mCl1=I=1im0F`J4EWURy7|exw%$p+CYu@F7zBN1WjaXKB!j z(bZg0gtXhUdp3xtJewmwjc}&9h7v&oY4@Z7Y6xe`x}KD~iKHf@fFP%42T#1qH^RgOjM%5y zxw7$*$is2Lnti+sjqY!!gA2A0LATdS?vEBFyJ@56knvj9R8UlxYd-P?-HJ8Xg9t_e__lGC=!K%Yj? 
zeLG+b_r+q+4TK~kDn)0&SYdyiBrtKJa4EG2x62i%Bx|U~_6hpPv9dCCfTxvhi`vox z{w-2`VYzFWqPLgAm_QV{0wkw9V5bQR`Mhf%5YdB+z} zgcKuPjktCdC-36&uu+s@4F72(zK+BueN!QZe2yd@*H!`&CK zXO_BRi~!vMYFP9I&v3lAV_1|mTBQ9-H&qN*vw{vM(4(TCR9Y2dLl19sg@>vv7_VOw zFsO7GU1P&H_BY12B>LL(n}GWmW6-z9=z>gIIHqYkG%}oOTWiGSGK+8Gr{#J^kC39@6Y;>XC(cZ3omf zkbI%w7s^8mP?CBQeir;m$W^pd%>HJsS;A=?;)FKGfU z%lv~lx@ItI;nG>H=YYVMivEKaH**+g(~n2U0Vys^A+)bqT}0vQ5S!(90Om5sbi5b1 zWpCmi-*Q{kl3*Az-5%0xjbVma=CLl-t@76S0>clZCKbR zQ9zwmTK(HKGSiD8RNj`CKC_fqC)5*?HvTdL=5Yt;M+#grcGagEz?+)lf;sFhehCsb z_Hc89Z;^7Ck=PQXIl@bg`8oir;!OZgG1)A)>A;WDXO>1#c{9zTA#T*-=HbVnFD0~Z z+`@uw$bmna|I!xBU2nQ!xL}IdYAmpKxw+LW61C81S3R(17yNNC5hV|PuzsVpt}VHb z3^l~PRPE!1|2WhFzCecwzF`a6(#CYHDEEHTP1rEYu{M;#YK|%(C>QO9^-$GeowS2t zs(to?Kk$ESx`MwgJHM%hKu!|LSV~?}@UW*z=WooHmcWJA^0mMh)^<%wpk!MbGzlvj zSbUORnFgJ(TUH#;7w=_(ZrMmXtI$qt5iFA}@?gwHHfk(|J?>*r26g2(4-_`&D%V%1i~ zC8FV$qeJrr2j8$S_*;X}^60L@imlf#4r~Xu++LH~#%NH9PZeT62AQN|CDLEJuDi3t z4S%ogL)$()arT5i8UQZyA>N!-Jqz#<{nQ&AT3;9o_;y-Ozuj9TCge`oj@_5CSQ|p5 zu=LHyUIa;n+;HGkD-lX)mOS+ePZ8h~{x*49VP)h2{ZM)gp@TQLF}~pw`UiLmZ@q`)@GUOP@K-G34s?e_ZdM>jxCU91r;*x(l47=TU3AeR;~ckT+{APAKcAKAc&Zix^q z%D63sV%VUslL$>4qImPdzf*B3Z(@}E3HYt6;x02Tfwrv49S#V3gD?0G2H-<4(|RlX zi4~hjqH`*8Xv!$SmSU-~*s0URt3im3U;#s#kk`D&utXglA(aZn9eL0d+byf~5~g7< zE$|gen7*AxdrO?-y`x&&JmxKvgl~|(U_%15b3lqA2RyW#2{03WHNa)f0mtMaDg=QG z)Tg+Kd|S$2eCi5Sq!<3L1t+w)KoYLHBQG#$QR8`#nY-qLW8c-0szVff^hQ&U)A_}Nvo zK*0U8ls*S`u)e2%QCIK~L!TlZY5w%3BQItzsmEtCvOCtxi*Z4P0$6DVLqpSQJJ!ko>mMU zPMAkgAI#t{*&E|aAQ*xEXz<=z1bl=d&a7vmH<&4s0Ub_x-BmHozlSt}H=q2AOa7!S zjkGNue;A|{Zg|F`p*JwJ7wu>3h2ij(hi&4GIfV&qVL_6%+%?>`$+iH{V-J9DMO%p?=(7u= z&g2nM23qEHXG+tKt`RVR@<>en^tS05R`umwonmaPr2GgaYy+Nt{j}wJQOa@-v)HVP zDJ(=tF$#fi4VG0f1ic7V^o|CcL zJ}sE93)-oPP4EQ{aKZxNau;kP3U0UMiY~8UF5S05j%vVmt7bk=nA2>DYTqG)YcdWgePur)3e3wu{5?*gjQq zhBNPj1ybSPuS~Zxt8rO61%IkuRaJ4vyR4YGETN2L{lwxeS;+eBd^5y7gs~&<;@TXs zUO2W(j*2klJu~bfa29POC8Ib|7yUH!&RsqL+FtDW23axJySpcKPtz?k${t z#G<62F2!6k(Ws3`KWTLEBkzA472j&D@V^WHO+Tlr43%szYh2xn%a#Hsvyta@aKm== 
zphB*(vEkgDhP~l*I-?E39cD4zEmj>gj7bQA|K`UolL$-BE-SS8x4Xu&Myo5lQAwj& za?{R52}2 zhur5Fi7MVKkoy2C;3qQ8hmt8rg2Wy8k-Ivy-J>@Y^r=FQf`Zid5Xw2MplL!G!e2bCuOibz}PK6yz)48$tAQ)RRLu2?_sMa{OKHD^dAvgRxI?; zOg%p=jAHcmBXMjv8^nq@yX4{~2y?(a)7Q?NE1m;xA{fgufYAKQi++OI+dfhYT_~p_ z*11?{X2IV&OnoDf#%1b&z^8%Q7#sUX_74DXGVkYRs zz8vnGoH9esgta?*aKfe+59@kv?O|TAprC>cY;j`@xidmtL)s3~s0gq`l?FTtm&yL5 zDn=y>{xBggu=D#pDWRsGl2v*TWrc(qf%ffAqyOcM2`Q<4N$P)Ha45#mjKQ^Iff9|8CJZ7ec#?1izhr))EZ za%+L6MKFp%W13p!!ZY-cbT2dm)XJ15Z=5Vk!gj;o^EtYDl|WbW^QK z@D0Aj3A|x1`2vQ_;gg^&dNO9p!!V&O(UeXsGr%)DlzgS-ZWd_A@M4fE(i5_MsrmYp z5?G{RGB%YI>3}f6?HTlk5c?H1a_vsw-9365Kw+1L)uBO@DLr+;-(s<^AmA@IC53s0 z%X`K}b?RApEunc*ltn$O9r#Tc5ft5UVJyhRq1ZVjjIn(W8y2(bX%3CJ!$h{KmL^L3 zRP6broWyzuwNQDZGq$Asu;d^^>%1%BA?KCPBPfgwE#M1|4ywyUkJgPTW#P6Y+6Jct zZg?l~QZ`2!Xp>7=oyVZ7u%ox)xU8Hs5A5)8S|Z97D?xxq^tWtt(BZLuE9OA#Xcd$V zKw$)75-xXv;=tTVIq_kmrXVK#@l~h@TEmahv0mlds8R8cBd1U*?KoWMa3+l_99y?AIPlgxU&zFXz1x7VxYX##S~CFA_g?T*J>Q zAt*9DkI06-$g0G}R@u&xvG;AcDy^3k4Zz_@^D}QX6xil?U*! z{mQmn-kBcSW^^_aAh2Lxkqx6yK#Gl|0DQu#7&z{BB7&hVV00&S!QUz}-+er)3pN6F za@uK0-g=@xN&P0={G$?if3R=Dslod_-Sc z4l$G<(Y48!Wj-JwBBt}GG+y$j)!30a1%G5R+|;HL>zYo4fB?jC3EX(!P702Ez{pdO zL$Pbo;>&z5rVAWUx*h=+KSA&*}I>0Ahuas`RKPraE~9_Rz%>x+t;r zOs-BvR*04(8Q&T4FUwjQ32usN`nbes>Iz_D;zf~bNu6{IFF&pAh{W%GmZG7nZ0=aH zmt~6c=~Wk$n!phrfD2@9mKe+T%O>_)Bieb22)*|XL=w-R@Rye9Uf+^RH~fb^I*;A_ znkVes;4Q7saa8>Q0-(sdXp2LJA;}l`!oYTqv^fnC?mS6%0^li`Q3n#^rzibHso1#C z+c4{k2703Xh*$LR~o= zqOB0v-_CM$fL%n0WAt`WSqxJn(tWWAuMF)Cj*&gKh!L%A|8>zuAZOlqYvtJVuZll} zy*Y5;Rms-;q~Z%6QhMev`Vn@tr45Iedm(I%;Z^+Oz>qs)8j7DkKCxi{p)cLjR*ljbioYTZU#Y{Bv1PM9HH><7ypKm zi%G+|*}Mk7&cc7;w_;fZzqdUNbUk;{Q=Kj3k4 zaHQJAOlqoh#O7z7ooz4kFo{c<$6+-RH{pD+u3e;m$N)g_fQATH3?pMy*~c_uN9C|n zvnhG?5{#II^-4j<5ib0Kr%Ui2l2sKE63TZm>8xbLLHcdPHOIvIYbiD!MHLVhC$$Hi zQNKMRr871>!}>mwF@rc~lJ7Mrt!+}ZErZOLRw$!_vlX}~1E=GmH9nEo5n4W#lF2qP zhi!2Z@Ce7507{#hbHpHjkCrrAdXe!#9vj)B?=L_tiJxY~A%o8M)=y<~2oY9B5@8bF z&23Nfvt_7l>{qlF3QixfIyjQRg3~o)T(KNxs_^kq?is?Wu7#5ztN6nsM_N}SWa-|S 
zfq28uXMoP?2&`(7rhmb9{gGn29KU@U6=!aG7%aS&CC4GgN3VvMl-?tEKD_|k&9mdGEK}FS53T&4XQ!$m& zue5spH#K$*erSs64C7rVkY)w0g(_&~ywKjSf7lMc`8gPUryWYfc*_)nh^|5Tj%kva zh-e>NWy=qhMbsFVj+v^fbT$6=F$L)MZ%R(#!x$XbZf$!WMA03iC^Dm>-ct0`N73SH zG2$0d+m-n824hszglfIq}&DEFQ2%ND2-<9>3# zSMx!TrX6(yqlW(n_&YvZg&7FqW)`ayo_&uB+w{};&k_MU3Zv~Flx{sb%+96!Hhuaa6tu)a1^s(|-gK;J zbLJfGGHii4&htb~c)=g=dT@S8>R2jJ8vB!e9ETU;$EtKcFCHi6gZPP4SUi)8;JG<~ z8#yB#wC$Wxew=7L&bdZ9jyHu1s6%C!dx)brWldoUGhp2CTj`1To{-D84R=2e8v^h* z$HaCGX**$Jzz_Eqi2;|HhtC$DeO8QUph>-vR`TbrQ}Q|;G7Nvk67TpSk_Eb_0i9LC z1$_NzfR;2!0g*$E48ay4e+^2u!5JRF=zGr3m|^$@oL3K9{5lsgq%bSdCB;-WM@3ES zoFO5`MJuX+$^FW?BVd7+b9*x??V7G-u_=#=Sp_gp($`sGzzO8kl%OdRJr}UjzbW_2 z{FmEFU6edH#kQ@?Q^f1aY5p=Il8hW4eef!I6cuG+>}?+%lTPt+;BQd9>aR}z^R_GP zkWpA5EGg+>sTBs$ZMV4o?f?2wb_6#tMX${pJE{*Ab52QdVZ6>E{yS z!3LFuD=~D88tDS843FAKnxs`cmw$3Ln?^7d+%QS+Sdk%a_VA1m8KezbL?J?IXtWc_ z*{Jpl{&>wZ{d>vH3cDr!=#)njStiF@2i#=Ps2nH_)wyPrIoWic>^dQ3Epj@cjz-QGjC7mFLBwmAZ3;)3~p-B#+D~CmO5Jjw6K!@U4Y!)!|`iQcBz}oDAEEZ`-y~qXigjf8B z*Jb>YN46D11Wj@Xq6uCJQH!cR2Kp`miYdaFt*}(ijp8Vk)Iu|2xNN$}v$RG%wll9j zhFfWUfZbxq0g+o{U@PU9L9W4A&sCADk_T}Caf*3sEU74CoKZ`v8$GoU%(FbkC)lIj zAkF92ySEs^b*Lb;4CWel2Nj$dfX66if zh)B{K59wc+{~#?~$04vVrgj(*NXs;uNTMv>a=*j}{WuE#_<29+Uxojcq9OI$zq~>1 zIZi6zC}>>N64J+hNg{w7nu2I9L|t5oj;OuRp*z($taxR%nT(5mSi|h~!Kp5bBdu4F+VGl}0$FL249_#caZ8~^e5nL~=0=hZu= zCp*{knzE^5d>n>t;QLO)l~GpIM8Sh03L_cjSS2vVf$Y_ST(eQ zO7j<2h)1{_+wEcI+@XK|BAtGxgB#IH2+$q2kN@vF0c@#(UZ5!xEzp~gNE7#|On;lHIpciECpO~ou!w_J8519wJNRGEO$^*=FW z>BJur=M~+IU!_eA#FR$>1W(zO!jipe$e#G`OS!Mf5d-_U1;8EdqGzeby0TDFtzOh0 z=H>vZEHR?|%3kZ9(xrmGs@j9~OFginJ@LsMi^~}9->uGvVsjwsDJR@v?mlAYy{siq zC`6}BwcjXoaMC}Eep3D+X$A$q3Vs&-oY1JU@Fu63mYxpIJBsQ<8Q=c_T(Bv539&5` zH07aZJ>iF`Adk<@MzBz0K|d)UFZfwFyiBwM|0w|~)8UAoX5rr+*pmfp!#6l$wYs=y zinTIuMf5FoOPS9>Bjvg?r8auaQPNDxpE#PAiDS6Cnx8tX?l4tLo6k4R-%m216pKk> z3a#YS)_BERTn-5%=|^^SSmOVNpOqzO{>|{iML+F(?a~1%zV0&6OB4T1f%Pc_AhtgN z;onJeT4_N;v^SHIoirE#=R~&0*y)=UnFB<6B0~Ez$;L>dwS{RbhLfVb;nM@wqj;~^ 
z2}pw*HndaPwaEhsL%!T-y9ezFHOsOSUoD2b=4RD&cpz`6<61(+C@%Zx5}X&_`^R5& zpRYSO5Tg)n25@0%iLp~TM5ALA0V%!U5gNHzG1Y*_JPB4_F6q7o%at)$XjgGBuESnP zj>ipr(^ES!(Xc;K4JCbqNC~$r=!}SB6U)x>1HVcQpzsf-<#W>yewAaj$VFK`I>@~M z#s%}o?;UuBf3Dal!NmE_!pNH1%%n9`6sX$6`tt)wZ%LPSeF#bx{iw+1xX2f0p2PA= z{NAVNE4cJoRy{zbsM{05gLGnOwM*iUFIIateV?=5%T!$wtIZL&tJ#&rygEzybLo<@sG8V?r!QEC8%|GY~C>tmFLMkUpB&VJIU#jEKrpAEv7@?Q3(?mJ#&peHar?x-hX*hNR~`I`?~a!1K(y7M5|wz($GBE} z+ywMjr1xdC9{ac+A7=Wk>aZYyw20HV9w|dJ*itn@ zXYx;I+LN5pIS4PL-wf~sJFhe(-3CW`-XS^xL|1v5iEuk%+}WI#&U<`lWjS`Nfkvf| zSv^fW3svIE0t2brZcyzK{kLNN8~g|_coL3nrdT|A{i$V8!I5^=)I~o+{4VN0D&RLV zGa13UOylt2Kf8kKtpETZ07*naRHabUIrX5H5Bj-4!L08vAeK)E+KK@!*}Ly>GXX=` zWrdcbb?~2qkz2CY$Pgmy3`$R^0ZaThI&U^@d+0atA4c45Ek#api$h%wQ@a+Ts0Njo zC~1kAF^~=(#?wpL=d^-c^>OG2cB0j29bBS6g1QIQw^eaJZxT|vlB%lW2uW_`VzsK6 z3cf^!HZg^00zJEfn*R0V-^RZ#_#1?W?O!+2?Gg{N;qxHZDJ-!`kGasQJR;;E)vF4E ziXBh-@6YPzD)?FW2XohY(@$I8w_?i!uyJxx3oj~qN>Ms6Vx@Mha+Rg}RCVw~8m1KR zpKH{6{r!d025|s^Kz_e`0(zkb7ptWXpa4(MBjuHDeCkUe z*IJTYSpc9q>v(;7i2L^L1+WqZ2>*r`G?y}eVEp4aHx2z#V_nsUmWHK0EozKmD2o|2 zHk2(u*dLsBeUeYvp3LE8&S?+10l%^!q*Q&Wrn`$aEnJxrb|-3*hks4-cU7yen_ zQWi1Vs7#tk7pUqwqNd(XAgFWKCmGYK_tw52`uILb!JnVBb7g(_tzM$v(mQ{^k0<_l z(GQFMORQfm))~8$Z;NNT^uz=IAJ6-hrlK7AA|a#FfSV9OUFNl`Y%P)Fl$Ts`IhZ1{UW%w# zrpPG(d(w{;{j@_!Q`LuL4QodoAJlSRgg$>|q;HKcoa{&d&p2ERC5!JJWNKI9gqK^5 zv5!;u7os?-(sSV-AE1)zGoNI&IIpp-D=sstOL5 z1~QL*E90h?-%Js&J5BJ|y6WjKRUk{kAT~0Gk?@!Adt%oA1%7Y-b!gOL&W5X8oFbu| zxACc{KFF!BX#=mEJ3AP+aS{ z3%LLC@Y=u~#>r;HZDU`8VR*nuDA!XiCY_9CQ4p2BN&rfPJ%WbG8Fm8Ds3xqqr+V>H zv;IW-{o^ftRF=ugW$(=8y_p;WGx~;Fc@!~d7O$jb@x=eZGb$lg@RvJ!#N{KD*biOO zlNZb%;)!+YJvg&24o;4gm;#t$C?+VP1{p_qr#?uP1V+MEKHi}pTGf(W0*DPtTLxV% za~l)<+i@-$i^A67ITg{XI;A+&k!^X%81(t`2mP?S(;k=7XQ3nmcNXg~8#9ypien0Wdgo=~fOPYy1cO0H?qVMI8-4N?YI<{`ocmF&5{j zu^>VJbqhBwU8kV?;E>!kNNyL#(E$J~2Od7~-`OAG@Na)xS|yo2PHXkHO_9ddh5uUw z)wO;m!#?gviINDZr|VHM%kto3=3(VvN_zNZy#)MMIsXIyZf9KRF(9nmufEDtpHjfz zkl(_mStf;-MwI@z6#T1l%TgJ*fi^K?0cLuOj_iD z#U7*#mH9!lgl!Ny2T(FJ3-u!0Tb5CVw%C{k3jeJGnhe_OJw(r^5%D4V_3I6BD32jj 
z+yY}aDN_>^4|u~5oHkBX4j@ipW!L9C(TXsmf&)PIRq<%)i!=Ky_kSPoZJ=yB_y0-Mb}zKR&4CBcc-jZqRZ`a1y)( zmR3r9kIrFuV>m+XA$`g-K+!+SCp-2;@xo!|tR6YKq0(kkbPPC%*z*K7 zcG_TIPBSeVf=wi0Q@3Y{+k$xDzoB5^AtQixT|~X(0kEDY`lW--B+Dr}X~{%Y5>u`O z{jf;-o_wNnT>4i zX>DG~_AWqdxmmiFssR|KZ+R<+57BdLru)eazy9{(mKi;HFZyI+_*ERidm5#;r<=E5 zNxQJLQQl9@{>+>zQhEs$%PJXkND)QncT4;Swf}yu81&+uW;SK?d?X+%@b7(9IBM)9 zy=wk*8#vk5S=E-6nary>qO)}vA5bM?txm+Uc|G1~;hT|1E#i ztW>ke0+`b;!i<_PRpzqcw*$ZQi$Z==&R^+g!#2jqK7+_3VF zRD#5sz4y$os;&(Ou>V?mbsyljX6CPJNCBN4{1l-%=v_ug7-e_FmW5g1LSKlas?m;L zUAn(?Sh3BSb_RSN=a#A{GP;9|&sP{BxsYrwSVRZHi{Ak7#6JptHjpZ5-Zkbo{`;8y z3CAXV{>%R=iSX-cQsA0lZ?Ot1!6=6S3SkF+bt<@b^7Q8vO}7Wyn9RA##Q^d4JE-;F z9=r(Vh8)G!PJ_qd2BNEIaM_wUA$Aug8^sy&y6`Vvw%e(EJ&|7>Kj!gK`S@!N>92=M z=^xA(nM~Gix&}9ndFVegDC#PPPxzbu51+E0=d0?oAN1aYbW_WJzn#1Vvs|*9aD}wC zbYBwxAMiK*C#3O1$4m*GYj#jD^DoSmg?PR16mYF;HRk{On9S z)`GBz^_NQHN!9_?%XGtkX4L3n#lMQn7OD5!?*ANM)}k<09cSE@s9=L(Yyzx`)lhU* zRTgzVu1*Ul{7FBj)kK2MXjLs$%0YB(J zy7=N_?Fz^yYD?4}y^D@P7XU3o`*G{q#kxd?$t%85BrK|2Ylx zf*%EcE9Ls}p63W<`QPP)-KN7|U-S!p7V!%ZxG=t;n92_$+-o#@^f9PRuKbKDVh-P( zMgN74Q)Z?M|3PvY1dE7~enrnebl9x`ItPjCTm&4pFh;g(r3^guzXSg`m1AYl2#AVl z(=G#FCHJqx1|w&lW>uI`4L>gBbmzVPQ{ds(dFe3I+}NWviYy&napo+Vk|3xgH6hgi z@scq;3%4qhgoBKuB0q(@e*N26{t!TSU_ETb4H)RA*>g<+SRBBxV-`(N`!R(g;F3B` zmx)wb`i_m_`TQmeKE=@Q2mkgXiw8M%VpZG%a4NDK9_BS4299uI(~tb5 z*Qx~Q8I1eSGU!ITx5-k!^-ByzV_kBhTkPON&oas=b4CBEdNC%c5sV-Rr2S$YN2yuRdd*+g{Y2V-bG~s zo|3)U^F!R<`FE}M69ewXM~`S{0SYFJZTBlU*Z37b5%n4zRkSH`z~&n zahY!YV?4Z;XBg<9X@G)PX<~$CH)j@gsK@R1C$+;x%##~OHy2PtE%m^LlgW8zepy9(VunBgyv=FI-_KZzt= z@D~|TP4pMDn2>k@!~EGV#s2+ zfeU_?4biWZ&Q^@GscXM_*6Nr@syc~{zbn`O^2%Q|G#<*MLVv_@S`2^}{Wtu&01N!1 z!^?$#UDp)&uNt!|5NhK8?+UEn;7_{p`d29iPpAxKPQ>?J_-|x(z`-H2YVWw<_kzD; zDS47AjJCf}(i7|tCcXdddZ9W|RZ>RqHhz;V8~8uJ05&u({Nutu51x_qldgRGF;BQ} zL+AI4e`NacoJ!hT|`GVSA7VFhHeODzzRrke+JKL0?W#^7(v+EaSo(Bsm#_@%Uu zts$nLwgFg1vU;g>nZU!=^dDN%yE8f*V*1Z(FRiYY348Pzs0UN0zD6`m7|U8iXYKEF(nNUU0{?$U4gdEvjQZ^qlAk9h-8Mo0c)m?>>I0%3{Q>BKSo}iuZ;A9{r&I90e-g!Un=YKJZVgN3^k2T 
zP6HX1gMnnk6l?V$0+m@80f3)(a6d2H|IOz4s|?kzqcsir^KJbeA226g!m0#@(a4O3 z5?14gd)#;x{i*Pe^UVc6ammvPM*lVqT14vkTl}3|KHnxF`{3hK-WGDkXodfvAJ5sg zATm^P(@*0=cgaQ+pA59^NnBU|`|%s>`*~~JyDHh^H5s~Q7>paRa8g){3c4>W@vke{ ztN)j=cTJKe$FarE0X;ISx-}ZRyU-f$HIvC?GVOmg>aGkDM?V12;qH+&6a96Rhi5!M z5bqaH3{~+=?1lE5TOGtxKj|9nK~qt;Ym^Sdxr(j$31hVLH2h=pV55I4VY8!1-49m3 zZgIsEn>Q_W+l7y^sR!=*n>|BQ4rdIO#h@|Hw?y$TO8y9OhU+fMIN=Y2b=R<}i=Z<` zT7kq4%#CNeyt*ynwlrii$y2-|k!pU(2eVB{s)?!fB&VA8k+eI@m_n$oEsPa(3Q`A9}v)2i6D zllwL+wkjsz23zNXkV(a;@Ys3P%ZjX)5EiWEx#p)i+h9%7d6Gqc@EX8Lz4U1|G;9M#3CQ%jzP`I5hs1RX?BA0*eC* z+BR|Qrgc9uf;|Z7w~PJ};M7L|tIh^B8O{o9Zc)?dM~!}f^Mpp>GT)+fa4~{it7SG> zZa6=xf#)G&Omkd7y14Kg{hwer_)mcoVk!$;c~(OJ+#(c+hL1CohbttTk~eYoi@uhd zwFh8=qlTLVwhqgDd||mUaFF}BNTTzdTQC`9)W~&S0J`~xPpP_1{(ga$zKbu3XlM9} z>%d^O!aED9O&PN4iVT=XQM`ecljH)_!F@nGHBQ-@pmsv(_FBK_I&23$)i!L_Hn3K) z4h$56g(&DNd30snDb!d}Q|H#ZNU89Uu~*}Jm}|48_WBl`df z|EFduPVV%R`k*MsnA1dU6o*G@(||#h_mBX0zU|C;V}yR;*Hy$8gn09CP|7&A1PoXF z>k9uxk_P?65QPCUPKhnDF@Ve@_RUA%ykOU|89wmOM_mn03xgJQNi*mNhSA{PLc?O* z<2Wt3O>BC=zXJfFsuPMX4f`{$*Uvwp)f9B0G!p)xCvI$P7T~te5azOtMpunZFis4c zAr21SrXH%YOZacV+=0B^>>rGmYuMp}^5oy6i~h1Yw;j!`nY;AK@~qQX<~Z+PWJ)dd zz6~m3>aaTSRfz5cv@^q}^*7?oX~MPuO``8!a9>EV?v}Hs(Pv3UC4jEdXN#WB32OgP z;92RdeG%8QxK|~K{MgpeMf7Z|v%t2xBfB%eR@E1yqm~1!Anc6?_Hz0{a9z&U0eWbH z*cS%;aSGH0{wMyk)^B6}6R~aJu`kdl$F57bF~e`7e|=hDy59%vji@F#4YVlQJISE? 
zG27xvHPG5TM*Y#{<82pK?ZT?+tg|>`@?F9g^IF(`Je}Dfb8#Ru`6}>0fKasoz<|fOdczLvU_L{3sX59eYT)ZXc<`)_3 zEPEc^t}k(OvlrMb=uSy#O5Y0FHtUYIq?_Ea2gc*E+oG)z6Ke50_9Fl<*aB9!fVv}p z_f&WS+?Qr{o-A_#*qRLOHGDV7(U9BqZL47g%MaKt!U5obk?;$g&3xNtX~M}1&iDJn zg5G@l0pU;WyC0omIzROKjabOLzHn}n_JDznfw|?#mYWa!-U?3ai#VCuTvV@ zN;kk;U9GYnw(p1=5?-*hKs)_xnR3B%k;Im^YUW=5rwTZv>-PFv_;I~PGa9-YW1hrz zv4KXy-zoVfK=FVdcCqAAJayUH{osIqv%oFvxq11fy*3Gr7EzqFUnnjf2MCr2uL9Ca z?r%3ecc-oBl%yYt#K2Ks_-c^bgRc56bb`0W)UkvK2>8m@ywbrZ|C(jjk(vRawl%4e zuS@8_bltZ9q|pTFO^t2*cg{u0oQt_!^s|9~11|$#fb%%dq5Vbr?+AO*)(!{Rj)!*0 zx|4nrK)AZ?n*p|UgZ9q86MZq~M@IbahJQnNgMVw_9@;w9*fn!-w>obuimUs=CVK43 zrQL%WjkkA1qTuHh{w*uf;NN6)1{|-naxUs7BJr>nI+FLx?rHIA@>tUfYAZyV9CyZPVAX0Hgfq8&xqd|vRU#(hXnv&$XXxl=c(dfBW_ z8}`_g-$Z{Gqn&$E7qh=)1Af`q5gdCHv|YdpP~kNiu( z4)8)h{inO>d2)24pKeuLgz31#-@f_0)3>0Drl-t7(ac$XxYdKjmu&jce;fV_wQLJ} zdklM1jw=;)j(Bo$-vwSPAXKJX=V%%L9*teC0cUM7cg83W<=nZO^toO0@h4nS+#eHo zT@zw`CwO%Wd~9k^la(8%n^*5txGn*Y>;15+8PVUM4*ZFK zcB+2Bf9KzNd5J>Yc=?9>CxhJB`Sy5<{vA&7wrac3&zVH+>h{ypE|%O!^04C5;}^(3 zKKOut(@$@D*_#rHnC%xni2U?X9;~rfbg}S$F)S{Sf298>{&7tmp^2a!%QD09{*V9T z-M6HE9u0}FCiq5`Rp=_s0T4E@2K;}=&p$E0q5lo*A6Vb;KQO=G2lT@G8*ac0b3iXl zf`*yN%$zY6W0q7s`vTCaxRuZVAl~o)_kBgmxRS-I5p)ks?O4-|0tmeD^PhPC6Yqau z{uBBG>l^-sbwFQOFRT~*z%uZKX~4i5>8`o~hUgSTl|^pY91-BAvJMyie*Zrm{+ror z<;GgUe-xa$ZhzmtFX$WdpZNI?%zxtj2mDX?H~fIV;NN&3&;dW70S(LpAT+Qll(cFK zP+qMAtlHJUe{=`){rx5P^}0-o?-s+r-07{}jOa{y$fN(P7wI@T#2FPDgK; z(cV9<&tJed@D2Te_kZH&53D~iU+^!?FRT~*8|%P4;3MguML)NHYiD<`cOmgFz~=jZ z{s#Xhn`cWwrn|HOPH7wf=0unxfB1F&RgaUZBzV?*i} zgMWR;h3ET!%?9vo6`g)=#$wl1J=7dz_659Z319Gk;{7MqH`X`iH~b6l1N_4Kg>_&Z zNjz&53v5ATyXCrwpD87ys9cBYSiSBzu%O&(a0~iwGxnXR=fg0G<+Ov%?i&G>a7t@Ai*9VcSZ3!|zw>c@|gP_;~;$gVDinYe%b# zE_Z#!JZh51z$%j6DM2r{G&Z7=c_Dge$|?zOj_iUQ%q(cvG2gnaWP4~<+x49US> z4%0P3*$~@64nc}T1E@_>11=2wol@YHdN$CzL}7ni&S?ic)XdvpRY)r4?CDb~0jy{* zEf3o1hFd-Z$fJFt@w%iBh$y>@4cVx-7O!9&Vha z*=c&FVk1WM*99RVmuZBjwFcyIKWV_F0^UE}(Yq0@EAjmH*!+$wIa!t!3;+kl0U5>t z4GhEEFfSF=EjYXt@W}!RKkJt)3i|qCB1qr(ih5ixZW#u(zdA8Vnk1hX&|DsFt19Mz 
zs{Gx!Mi7uC4Kml$wf(oN#JX zoM*>R&yXcBvbOm`Kyk=xodb*ld+)&xD+CXmBU~=<-x&2~CG9sXAdWmn&eid$5Zi*& z#9jE*(@g}ceRo+-@!YkT&J;a%6`+YVA17G{6@IkQ?nF|`;OWhH4a?*((K&(4u*{vddfg{mwz#hRB5^ku_7!Co?2yc6muV6rjT=%-Q9gMW2>$A0+? zP3)zm{>%PVBBt56gaM@{ZdsfWLt?iMW_t5EWU1&s1dtW{3i$W@R5a%8*;idDAZ&kD+{&>+Fgo!ZvuD)$Ag2P9tl8Sn{No18F811sR~ zCJb8r=%(hQE^y`QGpB!1dt4xZx9}%iasiSAx|iMP2}{I+N+pDvowW-h2`l91#LIGf zqTS|o?mkc9dQnTAOFOBsWo!G*b9X}e3xXEL0bk-0M=ZlNq_B^OlcM6RGNvSQ3?}5^ z1Q?noqY8fMvJu^yQxz!xhX312i?nP}(s`y-Oy-fVkfa3YlNdwca&^xVh2=L6fCl)E z2%H8G+>8DT^Rgm-yV8=bKzjXkTwP=p%e<20w1hYCioutFWYSRh1^}ZOO~J@EGW)eN z=PSd_R_=?j3?Kmv_}TQXt_-_9$WHAY`5#GZGmZmqNE(1IEC=)WE@8o5J^`A`B&H7Y zUe7}q=rF=O(LS~X%})PK3g!ImP9C}fwh5+pS@!21O=1m&i(#tL%H_w@?kGidGtn?D zPl?{f(8>{H7QU9@C@#PNbm$N+9zg7b^>`eLXzaQww4K2DS3wj_S^%d!hOtXA`Eo%a zj6|1#T+z8cx2wwx&xME@8St?k{S11_A>qv;O&H2_&Bs&v+^hy%-ymRtQ(EA{fEECc z8Rbhd0F(n5(u|p-wnFaj?1hk;%)phW;*E;95cv}4ob&$kDoM%~e|Q?9dc5oxAeN2= zcP+sLxhTn*2o-L4m)@NWS?0;lW?OAe?GUi5G{i8w|cgKHX?&H7`&Jh@sM?qUNq?yg2HyTriT^G7P$FV_itE^tB%V_+^>jYgFaC%DtaF+wpI zLfXR27v&^zlYpKxCFQySPsD? zhfKaOAdmBE++hQP+gWfD2GA%TPTS2-|1oX6Qve|eHzr(jJCdW3PWJ=+$`W2PMOW_P zOPUJ|#p^B%E}3ge4i6iMf@!!^Io@D#AS31$S+66-Xh{DZ5#k9I5}xxY9$nX^bAcxh zpcR|;cTRA~9dg)&0h2BV;DXGtRN7X}oIr9U0jC0Z0~|+zeS3Z?uoDK}$od9hU(OAF zD>W<);c)y!%vM-*Di4Jlt~vCA|M*ZI`2rv#I-oBYVIvEEGX24x7j)Qc*tU9(WV z<>|IkZ@RhD+ai`I_{4B&;`oZO@`#v3q#%^X;bPsDrkd7v+bA z<5h>u-9=%yh&N&En|rZab8K0>1?U1_&g2CNk5S13G!qO9^jMSahkCoSW zE!5izxY?9qLnh0kdd?T<3M?M+RbMCY`6_H-K95T?IBPemS zu#Z(NF7g3ndH~&jL1Rd-w3wQp*%W`%#`O`sg%{z{OzF>M22G3Zp*rJ#e!a?YFzHX6u$;t z3c(J(il6GbBTlq;UU=F= zeRXKIXOQC(9a@rO1UX4kh)rb01Q6L7IDEL$wnH=;sXXA|--<{C{U7nu+Q=;0I1LD0 zS8==f)CXuDm=KW@ER7dtcpO3yxw!^hr5NH9o?|-Qcc48jQCfNcc80KUoI~`K_^i3T z55j(u&5fKoAr*IMLRQC|A&;vwVG2V&W7|BVBqdSdR*}b3;Dpc*52gaIhmH$yl1e6? 
z8_-RRD{y&#ws9qf0Zc&?CM*Hd(`tYt&Nd3_!JxZY$em(E1Wy1Jy0TPX6b?Ic0Na9S z+N%>#S7|+d^>IILJK^=I6>)Br?p=T=nGe9107N{_(|$m#DjW%}q>>!HEgWSv6Bu^4 zKtDZpEMl{0hq(iPT~}nn=L8oH=`V*#6M!$sO;{LlLbnMMqI;A8S!T;rY2FYtLqNcf zOvR7067=~fNVrwPb{V(O@lq8icHQugb0w})@>kf_m%HtE?G=+5(nI2E)JQy%S;gE5-^x#W)z$n ziGZ;k$0*?2Vkqfn%e(Fb)M`*!naf74>r4MsF;N^5VV{sV`C|@9!+l96xkgFM43t7L zlrfPZlpv&V+~axdLX=;bz#sVU!m(QwT?h1ybHFqHX~1&9!L+Q=AWvP* z)9u76`PWJF>CmjR7(ITyO$u$4b5Ubk4eOVGd&#Rde1Zc;I>Z^lTe!m=gs(L?JRIe4 z#KLEEgaaH{Oc7Mm3d7<%6wn#Y6Oo1q3j!HFOnkZkgLwv3Qvp~;fd+vU)dcCp#37=M zLsi;no5MS#uv$un*Hkd-Pb&=|i`Z=xM0k{#xg)Hd43#qq4Yty-3`U2>st{sG%%jtP zvioL%7GqE$)3y%iG25FM?W$t5;)0mbJr!~J5gF78zM`Dw@JJzzO6TtK=&CEs zxG*NpAjB_DhfFsc=O-63+yPy^@7*8?hi}yG-J#K$>fWOD&dtxBU8rH zO_*|J91esQ!~7Z%Kk=rhZFb&>+;y}GwWLYUA7wln;6o1WBj2BAWDj{ zqydT2fPS>i=TJu;!NkevJTaspILZj33jd$1n3D=Gc3F>f>3Xfl9w~WPl0x*j%n|A~ zPDpopNc>=9L)H`sFlfmMc^)|%vBb3CMhZ~aiOmA{qgT0kc3dI6dJOmXpUadv4#*vo zh`J_|Phhxfh%S!e==fynW_OM<366-IMC0kEAD*a8+5u9T@kS>-mWyWhpoKtZTFpsL zZ{Q0iE{TG@beLIYZH-gNF7#UC2gErX#s$$x4@Ap2t+v$JW5dLYqLA0a2V6r%OeUBCX3BV zjE50Dsz$VV02e%ddL&jOCf;P|c4a!dR+dr@Uy>7nprs+XYb}}95)gHg?y$6d45Iu8 zO68H*w8|h2%bwOSsLO&o`v5ofutY$+*YVt~&fC&&N?PKAA{)GL7$%&{afr)#;Xd@A5uP#3j?n>Xy0 zVn?R*Mg+B@!r02zVEF=B))9d?M0M`M*pQFYkD(5HJAFd7g-UzHW8R0eqp?+*K2p`94-Aw_)> zqX5E$&nk&s-#@Z|%7FTojdf0P$_c#Xe0&lD}E zC(-y^fJz|WbX5`e=_qWAuPit%MGYTU5!lMk&)P!0leIcQmnGN6?SZHGV}NMke5 zev}v*BR$Bg0ddFv<<8c`mu%`s3%WKX>q|UJ1BHblUrL|S-Q5sLFN@^@RFo61mD`xA zE0TYPqrzFZWb?Z4QwIfzo^-F;%KewFh8yMdV3)$m&0aQ9QQD{Bz7~j;06So^s6?b< z-K2Y(3&FgRLke~n4+2+glQ~2Kp52 zZ;qumK>6}0RpKu8VLE$4CLLBte`TsG-g}h!mUd`||AWQyc|~Q{TEVZ&UoNrf^H~FM z#}c5-*0~A391-b(!96!Pb2Cw7th@u@*rCH6KLL!D5Kamy7w``ISVe~hS>f9L>NU?HU$Q^=EF))h)wjX{P zm{Pm_B=`$6BmZiW$h5*8gyfz|*JC;k3|<5CJ(}JqIR_zfqy@_6bc#hkbjW>D|}1P|@#!)2x)2qCOEA`@2ZmzF5b?8OVAfR79e zZHTL~6~z@NedyfTO-bE_@wzEQr8Z}r!0nKGc#9IbR+%{rO(nr!FtHp-K^BYU;TiCW z#8-9(jOs_#ol114g%jpt)=e4P^_>UKDjE|2U&2^x;!swPyQhNB z1k8ZHk$jT28F%N)Asiz$!RdirMd0mGVfg?{$OesVO7oM@++P_X2#Fc-%wPlPj3gwM 
z0E54?AQ7&t;08dbI4Fa*%+%57NyrAjPV?iwBUf_jrqAZsw$Oa+lA)}kQRYN`TCl zF}iy&!t|*(siBmBol}^GV20Pu=Xs4X!`V;1`S*3+h<(p9#L^o$O`p9f8x7W^=gNqv z7KW6az^Hs=e7lEV17sc;CjSwlZ@!99(v`GKXm16!yN!Ocp|5u~@Vb7Y|7jZ;JnwUX zZ`q$%s#>55A;LlP1=+bz#lumi0#iobtW2LHs`z>c$ghm}d^9l>Nh@3#IP&#`htC^- zS<-SyW4a$O`1F}&ty1U!1E$K>3-gVrLTam0Xp8hwWm98U_H>Sq0DBWBYzpkt%C3r+ z19E9Qk8*ibZiLWu$?1X%rmz=6R}NH-beee?@NpP3hG=C6Z2`&)Zcme7gI8HKXQp;t z=c08bodq2zCt48b;M7<;q=|!Y6BZXehUKK+VlE#c0<{F>1cQz0Vfbo24ZC)#WALLO zztK;yWgU2J4ftE6?D6oI zGQ>OgUZK5hP76X?q%EtW2Bt=es zTXQ9tbaLR|pnYGvXxr0n?E<*tKUXa9EiP2;bJ+SvxRsEA#jI)7t^{Rdpde+zSc1)* zp7}zS5#N+&Ur2{v=X)GL>rYa93mPMo^gq8dr2oQ@q{HBEm7ev9Dxz0zvnU+$pX|pu zPzKNrp;Qw42Kg2KBFqM-l0xwC-M4MATMo~VJ0bC{@dZt+m*_ED3PzuTY0grXCV(RJ z7K~bDG~jgNtEBt5oWuL1_$L`vm$@1n=<;?Gqi-;zc$hgMD%aBRL-cftCHM3LhVWL! zB|tgrk>TQZG&{SA_QOhvtg$WS=A>WL=~Hl9RiJ5id7X&Gq>$`&b{qI_DE{?LaHSG75=tpQBX{y_ADI^%BMqG zxlSQQvd(BY55taG1G=>r!GTxnm-?uHJN#`I1hYmuAAPU>VP%X*5+>xZkU8!q9Mdth zqAzcoR_tm}N6xW0DxrP!5MqOHU-T___+-*$O&isne)`h86OaRGB;k?vO#$%%#Z3mG8irckjd#2WZ;Z*g6mt`RY*|LtI0SEx zq(jeH~lKXb?!Zcw)B7qy>+jktTh~TLMZesLeG(7g@YUo{4G64mb5VQHhd|m=% zt^-=xrJ4a(Zh6!M&G~OZBGz8G@CLq} zZ{RCh@FEO7QY=-0VkBeAzf6m5_hA5xl0^Qui2t?)IBj%C^;kEnzb|^$Dx7>rv40pk z@eNMEmc~o5E8FW)RdWOZRq@ZLK879w>Hyz>c&?)25EU>;nz6@1H~gNXIM50TV$53B z4rELsoB^3Mi+F^i#**1?zu0OFl?0Ah+tb*&xnRFvhf+c_88-D#m`4GkLrxDG7nf|f z92}t)R1kQ`9%d3UN`cdDsYzI)s%U`Bk3Dhy+nonJErH}_gU!PWh!Y_kwuI%?(I&u` z4&)511(p;hkb^Fg#0(%jMi4uWvN}g;&iT#az1u1CCLHbh-j~l*QZt{;apomky!|8aEF1 zzf@-A%={K2-}`pFushlW!JTQ?h6hu-47a48S7H8!{|z%g;s18$!|J$i?3|>|&qc37 zG=k{|{K_>C_az)LCOt-mmsHIxWv0xt3dJy3@i%1>5)T9DhTiTK-gcqA412D7p2X!P zU=BFK7L7W3lP}--1VM*M$j+Z=TFQD>LY`u0%TUBIKYj=P2Hq2)b$wB^HO;A$jkRZ; z8x@EZz$LxJaO+*e^e){yZK9s{xwgqR~38;DKpVD6^oS$MR>0!`Q#~bjA39 z`9VlYaWMm-r8JrX(}Fp%9!^Xb^!@ph z@XWUs?`)jk6rZOiHpg6<$=KCtD#J8}2@Xjtb^(xCrL2Nurom1Hz~jundmBy72gDE5 z14zv)I!NWY7FDB~j(D`uy3cU9GF}ql!`|Al-cJDduNoN?`aBljiDb` z!nk4nH;UTy6DuzrR$4S+4jt^N>dYnjJMd$r@f@l24BR4Fo?n_(LA(0)tmwcwnc$=L zx{0KR^uK8Wrb&TFmga2> 
zaNV8XuaknWwOz6Js3VAjJ&+!J$f}qpLP&6Pz}z!}TO2`EwNjc6<;U!Au_?dx@fD{v=pE zV?hMWF&5?mGkH;Epe`ep$0(W@{u9YbhWR!V6SXm=e(UI`J<$Tq19;j9H*$II#C=Ox zJ%A;8V8ZfPl<1B(?C=?;G>~>(iytZWGJ{j)K0wG$$X-W3klZ$|@NcMY)Z?GtgOfA> z?nEnP7Ur9xAPL)DKWJpi@_#xu#hYx-H3XSO(4;Urz zx<30`)w{1x)@^5$?t6~NNGsV~oU$W2*CPIwEP#6HIj$HcEGGQc;4dKU3du()`lOtP z^-zDQnuk*mMHDfZ!hWPbv?B1h6wUBw{0&m{2D@M}wHsq(L)CU11#`ScYXtLV$8F)> zx1`rP{M|NGD_WXj?l9H-mgyX@rIj`2W&4D=VA^{alfNtmO`QrL1pUdWO|Sp}AOJ~3 zK~xR|)g-1%61#6hhwT{x`i&W4#W7Nn#A&$E9e`VPPNl$#3h1(WoXCSjLL`Od7SqUg z4I(R+f~x}4?{u@Bv)?eU%f^*TieHN1R{9ZN?ig~XBf{vEqC-X_cOpL+{g4sNE3}um z+{cn^3wlFxlRHn_r@!;DQuPZY0(Tc^;vv%jafnk;zemHVPzU~j*%%3gu1*UmW|~GX z%nkv)K7p;raz3JcUv`y?G)wct1Ztp&1M(C=4vS;b9a*YUWuPH5;~Pi$@{WOQWr)sz z-H5jp7@PFGttkH41M?@H0K2@H+*uhf(iRYozND3BNaSGRDs@750F^!+8UOVBKZc#< za3|T@m~eu-(l7 z5XW7zu>CBr0BS&$zpMJF$J+agl2jp}Xa=_B$hy6#xh=yp$B-ebgPhFHYB#}&0}?Px zVj8fc&o$JSIEUG7@ID%$9p+Ei4=*t_MUIV)dU!?vD+Sy8x5!JH1pF!y&%Q>xIpMZ0OFUNGbfLY{SoEqWpf z7g3ZJ^BFA<*%k0kp7e!mz3c5gcc#5Aul(~}1Z)%{yPgIu!l6w_3$4U2Kz$fR@Fik}*S&(85=A+7$ zQB2Ssb+X4Wpupae6jy3_3eD%u+cMjA5W9fX!EgtDgd=9_I{;gmhj%ENW{Qs!CX=+PAL>tS`?==^V3_ivZ8r1YyJi<@$XQLODQjm$kl z=2`|URcGUxY0;%CGeARh#fK5S%Mg#NqP2#0{4V-GLSEuxTSdaw1OOq&!dX2`MNi3q z$4Oyz7(?t|&g)hJ7X^E}7`o>$o{;SOe-m~175@DFwt-yWf+j*a#V|X#D@#rk^T!yc z&H))3s-eZ?2FYru`%#k!w)}~uKTA=5!~ae1?*G<(2{n0=MMb5e;=lDAn0ML8tU${O zMq37|g)>K?+^T@$M^kXz!6I6XmFf)uut#=>`7Fmi0e~)BSWf}4z zI2qeR&%FSoU!S(~ILhz`YcQhc0KubG)3eSGv@7s8z;1u~FLm37Jeiu&QT>*ELoS=} zFOU9BgU^WnN9D`}tFl8iUP+SE3}!`0JQcuk#q$Q>wkdy`1MAmb=_&vyjpQf+8Yg2{ zCSW;VdAb2Jn2zC^7J=-wqS;p>_o04K#+vZApXovd&EntOHDPJ%iF*(jGq9|e>ZNt(|dzf0cG`Z`^q82QWUO=kd z-;ZZ?#JP;x8fE%xi4OY#|K<309l(;7aSFqjX;ol2X)?t*J#^(Wh#YHyFg$evZ}r1F z`*QOoyESps4NnHXTnP2`NRvm3LK&sG5N9~;a%4q82lF(}tex4oa-1CiDSEQsIh_ME z@NTkZvqYEvH@VZ`=Prqkkdy$%jB~+dn1NPvo#}x1BV=LuFR}Pv_jZ)=(R$`wP~4%Kqh zIQs7`iH7_X{pHaU3Of|(*GrOx#xOz70cuvK+X}j2%nJfpdkG^@6OE_?2b4YG*rtAl zUr;KXYJhq+r>KeABKq7lh4goqEpWMh#4q4qu%Q)F)!|Fghh@@Q8sMU^vS^Ed97Ze) 
z7cA-~Lnn^vfefyQpB;@sDT)mmK1rnO+q$udNvMM(9FR!o1b&=HDTW13V2C45_mDZ9 z9ukJd{6v*_mF=_r7$p3!4U#exXjdHuZ4OCZ@*^<_mu!3>D3oD1yQY%EZkt`&2< zqGT_f_eil4(W60@-rY$^Gy#exA2$p)`S{$3kLsc6zetScH0YEV4&No(Zc#TZK&rX3 zP_3<=1yu{)Hh^{?z__|EeJ^ezegpZNH_%>k*EJK|wq0E-alg8y5u0sD@L zEgB2>@dX}+HSnV51YY3|!wvLG0+50q;EG&io#JE_NC^h$jGF)ufZ7QKVrrFylL>88 zV2L{wzd|Mh#)AHe_IIa2Z&=QMpe^`vduQT$IY`n0I$g9VMTjY}4(>DtB9a?{QQ}o{ z99jBQOVLJlnA(OQeLrWAC%BDvN>hgp7Ca8abNP?Uz~Ok4CJb?rOETv)4G*?r=&R)% z<<$X@oCC0tB#+@H()4XyK{Eccp^Ac=421voet1WeZiDW4*U z0S)23tDKRsEIL+%M5`n%)|@!E@?*d=H1w40H_`bw@#w!1tig~B12p2cdECYF`IYD1 zR}@EOeAP^4AR>8wrn}BVy{n~mo6jsX;-OPFdikj+?z!+%U38DqP(qR{%ujd2G@s|H zLdsais~G9Om7|t{?f8ZoXd$4lX7WE-S=U{-z3n&5Ulc&LrA2{>N%UoKxy&7KV#Tyj zMQaS?vJuC)C4Lz4?$UkzRdC)yxZT<9k7sq$?Yb?2j5<6=%p=Hx2pMH?M)h-cExZRv z{Xj6SZ4yd=mLn=YBA~TuoEib(m9EU8Lq#(xGj)ct1i+m3Qg%$5pF#u$_agRpln4l@ z9f#mc{ul7}MdHvg97LYnv8bWpF^a=zD+=yOjX0CVG8zYw(Ocv1eTVm3T8GhRA4M3&4!7k)6l!*d87xxICs>9XiJPZmPdbLNLFi(%Z^-qU8o|6gmQ2c%%>)X zV5|i;S@Yy)XSpTaVsm$CIT!H8vTVeylWocZag2c(Z2@i^tBlPco(KVL__qgnG#I+2 z*>Ss@2A(CM*LN295dxM0MmNRq0dY)ZHdQ0af5anu`ePwNtHsT+E^NNzOJzVbz_xOF zYOKF8`TKfxeV4LW5~0kn46r3iW&&hB7pf;u)laMwku3xilBaBlUA=uY{BI`!k6q{# zqF>(d*oD0)iyRk1kipz-IWghbJ=qed|PMkdZH!F8d9()CrCQXG2%IqXX zeivq%d|I)Mh6RNe?Nl*;FiKECQV^jb4F~?hm*MB__`;adf#o#07IegbR=|fj!ggMc zk!uzpF}rw}qOhVs1-}(CgU~#F+cJ%;oJcDf7=ap8-BOoa<=?2d0MY>T7uFZP;P2Pa z+Xu6^(|<5BSu6r4F%5-HsTUe(?rSg>^%PdOtBB|29R`~xCqJ4Gq zX!aO_RCVWaqt~|3FLle7OhVRV9!^B;^R8uvCrm5o0w& zj%OlpH{aEC;?2OF_exz6j#1?wdgR7fLjA_H^)mh*PRvr4puB`BP4P&#%;F#$Cq0Uw zqZlrAzQvlWC^R;d7VPhy{DW;wpvli*miDzex;mxn7bORh7054#aG)CBkGOo0xH&0hA79 z8ubOg1A_|NH==M3z(7;%Q6M{5sRBW1aYsHw*dju?LkVZ+UXFR_fK7j!G&rDj z$cD5qs1T>V#3N!Y5huk~z*g`V%8z)lvOY0GsD(=(MOsYrWqSZ;k|th6Ejer#BxJP) z;&`u#*AkaBIi~pKS{M_8uj4h$*TRUd9)l+MiX!@Kja0@j({b4$MB-~fQQL&H{g4^W z%sr{(=t8-gB{BEoa;<(DeZj(bLx<}J_>S{7H5M@4X~QLVff|Zp4_m$t#yM7Q7!p|p zqD=ur4#VMAa!LECD>Fm!^nYn=CJU!S?4gEXf=ha*(2wRF{s2RnBwK+zQcxZQiL?eo z7iNF%)&W*L77BO-k2Em$HR~j~yyKbJe_|dXE6@8;d(qz*hfB}_8l2FSzA}jy+>v%z+UaHh 
z^Eu()x@sa3yBNWSe2`j%5tF%={hlxl3|M<#0GQl18_QxI+#;v+15j%~=2!%QDM)@m zp$0$>Ex0*k9FgUdBH^>)FI>H5#NM$Tm;7g;%mN&fTOhfDhgN%m6c$<3gM?qk>d2Hi z4oPnuFYpb}=im`V0@RrL0mg_X0Ehvy!|yk_Ue~PJwoMp4Ta$u5 zh^Hbbg?<22_(rZ`!Q*h?CCwNAz3d-xmbU=#01v_8PAm;-1{NcQoYXl=fmxntAH(2w z{RG0%D2H&vk!LTLNY#cf7g%Cwhagz{5uFuLNpN8N00%}?Prh9Te0lPjiA#$%c6Qzw zRpC(}(GsU|9UJgs112IrVJv%Qm!eQVH*pu5Y2POhhB9-)Lm1R3<1Ha@*uXKNiDPKJ z<>U2Q{xh6@9EK&OOC%o4*MM^PfR+^4w%ldyJ^*GgR0VB?S_8`^Og*0&{tvlniB1_XcLWpIRmIVY5 z1-~{8l{ZtIe9}LPegghgL8UG96*M2oscpCkSRyc7dgJ&DKESth9K#(`^FSQh6nO#> z!^xDJSq%g#4uOkoaC3sw0^Nk2h7Qt75>NU;mGb};Tt(`+O|0#pN?a+z$kB=seK6xk zpw`!MykRf@YlZn=_N9vhOK{1U0@Rcu1&HeI+zSJN@3Ojqvfc_-KN9DIEQbu~(mFC7 z)8Mbb!zmF$r$}LP2$~pgIpr@NZ_SsRSueP9|#Q7|i=3NviC)C2~AN1Sz9aT4uV0|Xm+GovEc zYO7wW`Gh~nH4;RX(2FC2am0wq>Em_Cg&)`VE5f)eV=myA>8M25%1i(aj6l|gpAG-m zU}z^&Q><_LDf(!#iPbj5)Y%DtRU%?@w=fpy`kBzeYpyvVBVBXCDNfh`T?>@yKuv~V zkiHBYy9~78&l6`O=YiCsHVWSirbr+x&5Np^#wqaC^Z{bJSnOSt4n%x!DaNDVEzqM9 zWexG2Z(c_X)+7vujADT!4fKp(836LwX&Ib*o$zY}_T!t$p(X%svCPYlg;NZ??SVBSaSmn{6e4RaFbdB7UbY&=1 z`9#%M@0yeX8(<8_vEp}R>9!f*@;8n{>kSNysVJlw2bRMocxWxHfw4e?3=={fz#258 z3?v5rzvgKN;7nizj203oyRSTV%0h4LT|#}_PtK5A`tg8#CHxZXM}t^FOuv-8vH7Q8Zkd}+o_9Vf-k(Mj-fS#b!gDt;dFOVniykGe9;(k&z_`^leRq= zMK&y5;&8uGIWfwyGQJ;Mqe^`=5tF6}3jaLIn55b}tuR*NIB6blMPD1Hc#y8;z#@lp zX-$j)MTXL0BXW7V#jl8*T&WOkHRfUq7cck|21XL&0zb+K&15B-4l5%%wP=*`2)2NU z@dBn`uh$a&|EKKTwqwb8Cb0mRwfh?%-o*Br3t!mp|5VdTB76Y^h|E&=Kd!Y~EmcWX zAc#ZYK#+IfL;U0wqv2Rg(}{pDBYA-vwkmm2^nJ4sOKeP`>*iU2}7Qm_qfNA@J@Su4C`M}Ad6fHgbLh@r`rVG(i z^e6vM0>V^*7x?Jn3?w-LfOW*0atgdFDM8{2Vly_Dn>lv`E@0=thuE-e>rjB}S3!Jr zLbHwMDG)SxPupT93}l_8Dmu;Cf1-ASRH7_fDv@gyNhYGlHr(W{W-TNC-e%4q?ksw4 z*y5etx!Fu9c~uRsf*A4~J3a*t$x|@Q_kqZMR4n4s*+r*Sj;muq2yG0av1Ky7UnB_9 zxM8-r7I$F9g|%y#Ykr)DDgzYo;Sp-_A2`uwCnA#1Ek7XvkY>u%4glM>sYB`q{c0WZ z05hHFygzG2Ugc*_UWj|sm&3!52>@D{uhR;FC;!p}&=dE?v-|t_V0QBV-pmnEZD)4`K(dOku&8kaw*>Eb0I`Miar+ z0SZvbE|&_t=#K#NJ`?3{(};!Kw*2n#B4NN%J+*G`qZ0^aaKo{~voJ|so4b$c8Bq0; 
z^a))CGua=*RsAM%`|@F4r|8+%nIp{6S^J6Agc>wesA(Vyzt{U?bycJ-uT*U?Gd*>;dL%hp$-Q&k0-S2cU6 zGDe5pz#n*ClXL+$-0>lPZqf{kd2!(;%NgoCO0r?&8YrO+5KvM5gMNwH^ht%Jn*K-X z-(gE5xJsF7PuYu214ybLTd5&XAes0;%B(&RAGBs&~ zeyF@^8oJG!_*1uo;cu)3yK#RqCJuhElm2^i;bRQiW=$1QzjZRZQlNHa8zq1~Ag|yH zK$*+N_;qd*1&f^dJ8FrN=(M;e{OqnuNGdZP*~&47eQw?>FUH7|@2867HR#0xToutm zAdZIX7APuQr{n`ANdTAVhw}bm6`2Z0qp)S;I}`ovxilb=Slx5Z8@a+_a~bTzdz&rX zxdztsP=@EwapPJ?36;sj^|w_Q%_T8P@TgCYD+p_Hf?xnsBo(F};$P_x9C6iROCGkvQLS-l zR`e$Q>s1@bYYAYP96$2Y@|U?R+~hKBRtucs+*s?QZu^^bTRGiDjDm_W$Ts5LVm$PB zfH}#t_>W9Yv`CctJme?EosOs}FM2g9j#*(r`UV1v!EKl0-f6b3%^e$d;RrQ zk0&!j)xgwd0^(^d)W?zA=EpwDEp>_h;McMMAmLTv?^mIFpZo?sz>Un?#>U5CJGL34 zu2BhCm$e(#Buv#3t`1)&{SyPZ?5F8iOh|vVz_A5*C}TYn=j1l~pxdws5) z@{7%NF>akNEb!l^r|8NqAk)ZNK$mHVXa}lq51qqIefG0L2}6ef03ZNKL_t)%GKp9I z2GwbiubZsToE(=g27JM9q+QVdY4k*mXX`jx{eC zBJ48Z`g=qKUZ;pFE1hms=2A3^PLt$SiB0gLJz1q7&iMv3j>gVwp0M{?SE3#Z2;J3vh09hUl=@}p9sHd~G^Z#Y3@hY^WEDcy*5ZTuM42Q?Zru=a%ok9{vAae;Sca-{`2dvSF> zc0VQ~65@aJR$hVao{wP^VJ7+S?g`pi1#^N)6gNHU(k;8gm&fK+*Grh~wHC6qcv-Lp z>Oi=m4)oy45X_LN8$TLbZlD?sfo#id_}#6a`NU73z^FO+hx=#E6s9^lVEln!bgyuO zKt!yaxxsAaiEU|cS#7`tBJvY2S)+=QvTC#hDjTF#i($7t@VsPQzD61RwEibLMonkw zU+P>6H*jmJJV4O5(Sn~WP(>b@Ce0b!49`29>P)#+i)0vf`4I3#_ z`_i@t1w4li9Pun2*uaM0;Kc~@;BsIGE?Y1lXy&}qAY?$my5B|)09T!l68~GjUGlAJ zBvp_f_E7w18P8*$3I}xyRq;e_Jn2uHof@(0>e|erWZPeGdF9ZV<)fyNLU3gltoWH4 zo3{R{1X{^|sC$>LsrSpiNn0+V%C~YI#>D|h@BbH?8-HabpNjAOLjBs zZIHV?2O&-T&)1YaAD~_>ST^X-e7FueC`~Q-DPFsvPw&E#10#?H-kfH)xt8rsH~d+= zFYIbNS*oh~RSED`yOF{Y|9PrMl})G`DnK3ODes#U8Iayu^mj*fK`7Nw#q^;CfYZ3) z#M<=09l_f!GylX-2F}4h7B^P%=V~9M6sTQXX;Z2hPJ2;N{PU<$<#2r{3`VV^@ufYP zPT8G4g)l6|>1ZQCtPS{z8nBa=!u=*cIn6Z<+yEk6RRL}Tfy!N2mEg}FR4kSuMIT*O z-(l0QoK?ABG5&lx59Y0=M&16AN0!1YZurLn%L2|#x8PbjDC>fRlYr`)vuS51+ z6FU^BIFjpCb|auuhjnZ%2K~+dsvDu=14=6$GPTYBlB&WJM~YI1&Gx2W)|c7X20Su( zvvQ>bBWMtCoNB9kxx0P$RMGVE-~p_jKq_`JYe*vY!cz+WzE6ZuCGKK?2p} zc~Tkbo=z3*~Lt2^B85eaO5fM z?5WbphfD$OoDQTGf=p73*f_T&1+LO*$NIDY<+UAI(;&Dt6hF^yFi!=$Jba{Ac2qvb 
z5wt2IQT2DTDukv@IqlnC(;!w}Mo_nra*O^`Zf%ag;pbsd%a8-ej*bi5IedtU8Q~ke zDtbx7z|04J)<1C2R~tQ({By`rMhPcxdk`u1KZ{w zW{)IXjq0DgKhKCf2SDJ&RZ0I6{p{H&cEACU_kKfu;51Ts!6AGS(+=}wLQOEt2VS-T{#l`Szjr` zb9sV*==3GdF8Bt62(z2`t40!E#0^;F^#E0vBA^h(&NQzy1-aj~1r^2WOalX_>Y7n& zr)qcQqk+`2N-fnjK@TlZgV}jdG96?X?;1kxdEOjm!BxHbNm_Jes8d)F{V6={bhB7I za2Xvv_Aq?Y5hZG%K{JpJJfFNAuP;EW|M0e5CL0SaJV_D zIi*^IltHNrVaEIwC6O0M35S4W(web_)w9QL+VmX55EZ5vLNf&@S>joLAUCl#xWEu( zQ0fY4V~hKhA@nSQ4f@lvXYtK&y759iTpN|jqvFRBlbjqU=_sW&6q*y##Y+cJZXhRqT_{dJIZrCtf`9YGfO2TISAVKIF{Lg;A4%%Gwvm%=YXQV*r5qV|J z>ie;lwSmQ~RZyv|3$eqNgJxh3>P~H}EF1vCl`t(!e3z2=YYohvJd*r0C}!Kr!TCS)-+7vF-tsRb;U9$40)8QaE36_~0&bC@Tpw$N7LY~Y9%5)p;`@@_ zlaN?}2|)S_`7^Y5+i_L|Gzzqn5M*-3+Wu*k0jIi73?M$iA6-Dm2)OAOc_a125Otwe z!_c~jLi9(aqlUlw~jqT|J))o9&xQL(3E>lJ5 zx=Rd-08j^c(hX#UTt}#eb;=EPR~W8}wsN>>p5~2uv#3!nB^|WBKQQ5off@_ z>cJ7gTlIoeRxsr!BXyR{m;UB#g66X;_pqPvy6t;32KAY*mO1^#pc}F1u!uZK#g*5q zX3OCIa(^4?b*rd9nZ&Ooub_bdqXqgH=f#<83C;5+{ZH+K7I^jV8G>hO`85Ts_ zfTW*2f5}j-Us)Bm>h%x8qd(odZCwq~bx!$2wTgzfVb~Y4rh%OTJueI3sBk51v19HRj4mL8V zD^copjI!BfkFV^up(rXV%}hiTkX^M0+i;nV9oBoy{?4JnXPL4@->A928JcWiwlp$q zE6^s8w*wH+DXl6c;z~L0kU00=w3Yz!ejlrSs(4D*3)BrRP;DbdkHn#}{gw~c)uKNo ztw(kN7D}3vf#r2ONoG4Ur4Pqdp*@v+%YD#I07*X^NmhYTbqn#o^Xb*wM`3uO3+n{R zM9}?^K3h)GP9{c6WTKh)Vd69QV}q`2kz7QK(w+|&XNy~hoJ&1A@7&NPt=vORc zG5K##{_o5lAhMDQoq8M4k67LlA8=)Qr{$l$ybMM^^-2mnDk zJwxH?!w67Wd%|<@PCd<_1Ew=E9GiJT^6!0nM0!8Hw=V)ntdl4^Awtijp?2*ozuqO; zf6@}Qkg?{+H+epQnM*|7nNg;v?41{Lqr1o;qt-M#YWPagwzyTKRD-~&JK{is_OQkO zY5~$Ius4pgBlh|CxknbvjEdc~m7f`&WA5rG3A-7}ahagW7?Z=?i!>&-3d5P~Pkv(Z zALaeg`rE~T>hzoZhn(S)?SLAiH{kG#LG!%3C&)a(LxZ(zEWhcO&GSb20Y&#T9Iogr zb31om;UkaQeyo*2wo-}Um(gs#KR3fNA9^!Z-dTc0ZZiXy<&iw|Q}Ke?YQmGeghF`+ zID=eAMfYVF#(YHf{UnVe)B_3^ddgKi9o74L$XZZ1G-WO?Cagq<2WYsW!bBPXTSs3kwk^yw0!CK?8q7Xd${n7dya9RXcc1Q#~ zn~Ld*Y`MytX@S!(zc%^LJFH4 zR{hXeQOoJ@$dGNIUWFq$iDVWC%`sO$9~9|$)-=!A+~NxWP4Pw!B(gq)J?;KnLq1wD zb|q;Y2&09KBP5h;yQy4fT;Yv<2D+?mJ?Hhe@zPr){ZN?SmESh~G%|x|sJt^*@mkSb 
znqqluhBqck_uLUiC{+&Q+KyG&;Z&#f#$&#i&M_pBKg1>a)txC-X=Gu@)52*%cMCd4 zQ`XCk)i_*u=5SagP?Z13z8>-@=?V?@6^JVNIkCt{ElR+66RtF!dLB}v>JrDINKFL) z$n%#DM*?$r0H(5bwZsrGT7|Wh^5LS_wE>-}r!Wwd*l|?|pfe2^@`K?LV6={G5`;SB z`u^eZT35B?qHbj-F5@l%S7E<%(9&NF{^u%8Qj>11`43dFkf0x z8cMdFa^D;9dZd_u*#j!@R6xxF;mb7DDszZx8P@yemy9CSQkELBsq&wXxlwH&i?#|$ zi_j4p1L8&BX*yprbb1w_ZTP2y(&Rs@B`LF%%0ZXYwL(4BXW$1f?~W-cZSj-K^2vWi z?!-y`2dbF)i{xPFK<`n`7iV3Tke6TV+S0WU(5aG1e+>CIbgrgBU*U;t08RPHPqc?^ zLU(Hf4z|D`|B(96ZY*=Rp8qF3D=ld5QkZ0QdmD8kA;mZ4WE)4GqYA=6S9eQ(R5;CA zaZz5okRL|ArDaz2dO7(_z;cdnx_QgH6q%-Wqe7qpt;J?uj^gJ*eG`Gkid}&iCjjm=(lh%}>Rz_k#t#3eKgcS;%oygWT7>r8A??avfv0( zx>#hwl}kSgVvNH(&5tJTSEJcXk~|sB!Him>gev1iTb4*VOG!ol4In4OWrs{z895{* zQbsQ}?NL0WnrD~F6rM^z=|Kn2ojIHTrao6bp{5wMUA2arB}0e*G-QktokMhJ5oNBL zCS*u)3X&q*0B~y|Y)xq8oO|&SCOhwCf#ukp8|W2hLIo}ldv%MX^&HKvA&q8vnfUr; z$RBW+S3y_VCgpH{mznvby`QnD{GWj0F>4`)0_GV8ThEUPt{lv-8d3=Lri9es2bY(S;&-zNs!8QE zP)_}A`nxQD;J<+j$L&oi>L305=*H-et)iWwj+YeZa9lp@r-{jRoA=OP4Ed>K$*@Hx z`iV}W3;?H1oKX8gus?q^ptMbOY0a2GHMvE}%cW_2^JFYlY)OBc$jFpouRDfx<^|+d z=UIB%!fU;Trr5veerfgm@rLCkN;H(yRraJz=QhwZ+%w%wb#@9ARApFv2LxFqfQikk zd zzoSty?b@yZ>$(}c*in5!W$M*~Sj2uydkc&`wFAn92A5gh{%WindAA|yPx;?Nf1Cf- zEtOU8F8a^eW>&QcdBv~gEW+9g`RrkX(s{pJ*JS=n7w&32HO zt}LZV^~$h^=+|f(8B)Qbbi#3P+dl|OKm7R%&z&%3a=XP}oU2k(l~sU3YOJYJJys!d zJi1fMmnt?~Dvxt|3EJU}{rqP>|I_zC+Ju$Y(y2nE28}S(Ewe%~)oo=uJhk-kVQ^>D zQ5fGq(>(W++?3BPz+0Y7+o7*7cl-U&lh!xu)+{B;NJdUzqPhlj@?)HC5&v6$TGGN! 
zOqsNF+2BEFJ6;>vlEdBc%xe|YA5n8-4hAr>h)I8e>!Gl%zje5G&~N#YLp;d z8RXy^rFt=lYzplk$v%G)b(l!@JaxE-6R%Q*-bbSoH{p)U5YAMIH0~V$9vgsD#LGc) z1>gLD(Bs4AzgG}uV@HNET+>{UhBa=pw%1eD(>=b=1r znbcVWIOX48&HR)8xdqw?%jS)Mt-SBaS0T#;(Mw@_Q59A>)2O>^*}-o6-TKXLH|V~m z^y;1a@rs!eXOIKtH`zXR1kc^cCEV~2j18rDC`|{daEx^#k|3-PZ+acp0ScUP*!YAL zeSo~qqx1w0?FCerd843sOf7#uQh)F4qqGb)w5&|QvGunv*G&3TeoXS;E*aLkQ1W9q zg_cP_MSrzI@X293(<%MafTZR&nS_zl>qo zpOmGL5)ogsjHaoXZg4zoS7KJF`#gLQg+VX7fJ*faT{`&(-~i`dCRHEnJ?NL}thQ{^ z!}Lr-@h0@41N{rNqkv{OSNB_A6=xS4JBSEV)76h-VFKp@I5o&<%$aKJu&r{(x=*z+ z#g_m9zsUXcXZ>8-(5lKLBlG%_tywe$GwSzsVCQx$sBybkQv5G(qCUA?diQ%&!N3Fz ze|52Zu4S(1Wl1B?qg8egpyAG};y3+-igLt{hE z+z!$-{pbRr_Jp54Y-ssM#;en*SJ1E18e< z`M=x9Csx2v;D--`5qZQ7HOxw|(yqDn3eD=G!tB9p`b!u@zYY1pm;6Yjvz}3Z*fYW3 zWWVU2%AWio^D@`aSId!hcE$5nuk^t*s9&S*o0}**q`%N%_zobxrmr*1CfxjJ~Kq$osrr=dT7 zw|sXiD!Ug&`nd-E)#UeJg~u+3rNO5SxHK5zENUy$p4?rWlj2JpAAq41nt7w zwJY-a2VtG61KUFB2@@s?;r;wIL*moZk#bVJ;?0A&m(gra_g}G{bF8&=imG?5X6bB& zCFta=?4Y4=G(VlhM9A~ zumpa0(P+?59J~qb8ajs-Ut;s?HyF#Iq)y`$+j{2MIu>IJ-FjasVFd9r=odDUoYdPB z=+w-SpU&Q%iQ+F?--__-H(yq%O|(3PRSKmoPO{fZ1(>N#gA!3C{8C{|=u| z?uAcUpROjs+}v~SlA){3E)IC9L~3K$c~_oj4!I*COo8-a>HsSCzseOhw&D9<(?4z2 zURa?skV-*_|1isjE4LmMd!&reLA-fo)!=_>f37Qs{gy<=xo7wnhIi(~zRGw0^=AQF zv1r>21%ha9%zlVGFk&TKb&{rzGL-yuj-vFIN1ykZWI*-*<*`fFks%D)+K}LBE2o9+ zSJ!zyqiQQXE&gXS2h5}s=s6ZC5-C5Ih7jbt#z4%PdD$OPxfcR;0U=Zgi>HvhM1`mM z+dIj<^JncuM3r1jjeyjhG`gP z2!no_nZaDey{5j?EYqGsLhoOq^RhVpG)ylu`uhEy<6|2rD=}e(=u&&V&5$AToMd`v zy*~|>rSQ4uM>=K;1mCQk64PPXeCb=6`?0+Sy6qb4a1E$2yl{`h)zmc;^^G{wq+ihf;FQ|zT-=bRUZIhF%vuI)(rP1JAEUx>wO zfWJZKt!yNMzjL9}pUr3H&IxmLE}`jG$IQ-33;o<8NLa2~AlT}@$p(XXb&-%2p^nP08b>z7$R z3;^;@9VMWs3FFOw8}ys!la;a4WsKHkka{?Z74~`u-gNl`Tvj z2QDw26qg&?31hEX8&Zs9+PL5VZ$Oa0Ruc!!%M(JT$^>IMso@v(FYEa-pm-`tehpxq zFtuU7-M2;7yrbPdLJ-I6B< zybVkAo1NdO&h$J4UlRw5{thP8WS59@*w7ezVS_@6;lo_=H&YdL`9^KE=-1D~5*f^~ zmI#(5stsDTic}Il<)@p4fC54P=EkNbe|14T*6vAv09=*EXQ-41ASDmFpV(holHzk;Dl` zr$ee*WuuEHRX*h8_3HO#q3K^GBNBd{Og(qzgzOIn|9hK~|22wc*dILkpC6evl;6bL 
zke`2XW-t1!Z2r_=x+H(tA2_%k5%zJwtE^~(B)Fv=j!b5>9c#DyeH`+|@G0c@x3#ER zX9l-x$g~gm4{bx`CB{Crhd2%XZNHu-*c`mRj6R;r_!bmC|HX;=fE2hq^6Nm@)X67F zCO=$S(Zr86(vEGKd^j=C8ane&esyA;R|tYkbr1R%xP~uhQWK3&V})-gg=fQrG5q!~gRqPtiZYO@DuZN&ht{@0*SN zOI#+Gsf2H61fuH<27qOK?*cCH68$o-hw}dV zPvR0oPkIXs&;jh;cWWlwgWF9%pL}*H#>4xz-!e(g0+auj4jfHSHZ>VfM-|U+T;Jq* z-%r-IVsek;4HGPH`Q2U-hhDV&Od)LN5exnS*5Lno{{G7W+ZX-z#sB_=>#B0;ke^Ou zt$~*zi~q2aL;mOUZGbF8{Eyh)iqPutjU?vdoBvHeCjIe}|NOcQ*C4FvZwF>$y`epN zE75{qhzquW5BM31{Bi@6{up#N`h3a%lmD%i@zmcIbv`k~LjazOvep#Q&jp`~&?;b0 zar%BV@&8EwHt^H{Ue^Ceh7Id)!>wrnM{b^IBH{B{>OXP(8#27`CWm9#(vutbcwTcq z)94U+HnSxob=Z*KI$mpKu%i;SE9)?!IQ|m)UlriBgzsk}FD!Uc3`h(OfGF*z<$w>&WE;T9|B{Q_JUPb{mp+oV^CZV zVDSrf`}SM-qL~tyCHhm4)uZu_dH=N0-xDsZIeB{`G+Usd2+oS=6Y^v|vDZ=&IX zf%3gDvnvVnP-OoHLHc+4IsCCd=s$tDzB!J;P9*^LSl}ZsI+SRpRggjO z({qr4%b-h_2jvI{rE2n zc_&X^mdL-kI)m{xJ;%6oK=$PSAwG?1R?gPb`uqPM%3t)q_+Md7(XU1(xE#p$KL zlm1s07oz{65o<(X|C1tQU-aW?hD1L+li-!}aT)F|ZqI#hBzji#oBVJ+b$RN-uu*SC z{$@VD_}@8+K|frnDs`Rs(~rWAyQOxjF?YQ36HofzOPIEP`Y`;%LVlH>(qG4Q|L^~O zdS?4#&?T->h8tgcIN7>@zkmzK`0=m$7ym1M{$T%w{ok-J@GsmK@WH;YKcM4h0T;Go zFVGPS0pujFP(_G!C=tMloZ7H|{$D=nw?WBF8t}=uxyJSU{f~(M0Dpi_ttkFC{QN=u z2m22q|9ud-1V>!pg}rbau*(1Mr6x8N;;JVPD+lFr|NK9{(mxfC`uffMrrbuV?E?Nh zzve>zfd36Y|H1ws{sKRU5AF}{3;bZOEEs?bv4Dl!V5?4S-D=!ZEA7fbx!6DdckA9trF^Z!VGuCXKkLHq|le;xkwPstA#?p(qJI`)O|S_M?H+6H(00FJdq z|9_7!k(liL{O0K=^LrIde-q#Gliw`h58^-IU%3Cm{(yfWlKGeTuYYmXw2El4Um!rw zgy`CE1NQ#;ze23WZ=R<4&)+2fdM+e?{7VZ@;!~qL9rOV{;QxaA5BL{il`bdeb3y_Z z_&K_q=PI@eLNpjwZQP`RCbE&)JbWe@p|pa0`u=wE~L7*D<=e;S`_7@wrSmy;Xe^$q{R{tx1V_*0bc z1zfn7mXmp4sco!2{Z;mpKwLa086I%YN4)97`(gYzfmQENo7%Zlz&G6Sw_y?g!$4Gm z9`b~jS3}t4$U7F!kq`&dhE^1Ac@)03qLs5vd=j~bxU>$;dNKy(?b}|vHEfeD#3j(` z08YlHV?A13hK_}w4iCL+MppN0!2oku1RSvZL>+p01pCc-AKb>+rs@1S)-rcQyO72` zE8E%qatT5vcnPJUpoqH*Zp#2OgV&tF{2J$a@`tEwrCcWB#t^NS6urKgOMU{MY-@>y zy%3Hc;4=IOo2Nu^ognG5szO^HA*tI(Y6&k1!eNaB#gN=15wdGRV8C!TR2j#$eJHlXv z-i$E6bnEm(ATShlssx`>V&j+XDJ8uUJ3_M!pM@XCpHR^@Z 
z2_$eZ!WJy@R4r?kLdApA?kr{7+ql2nfIq*vMqj)=)y9Dfjso=wC2ZBzk2RcnJzAJ^ zSUv$b45@Bdd0>d}1tV=T-2-Nt~zS3C6`bp{=)Y9 zdM(^Z`BE)s7$7%fPy2=^jL0hwt{kk>3OfbkwF}e4yyRzUE54BRT+5$=$!p@X5gm2V%(3l=9w^sV!Sl*h`92%OZekvWJ70VH?75 zIsP_a!E%zuDog|%)kW4EVL8GlZKh)YxR&_-_rlAsZ{}Zx1n#51+QUfo#{A&=6R1jN z-8-MQCRVwt-7Tk9n69v!8L$i<+S?2vy~YdbAo-LMQ@OS6&U%VYD|UxFy-lIA=MX+4 z@B{x(Y`{YUt_-d`&$Vzbz~Ku&R%cx_8X?DWyS3=U0dv;Du#^OECbzAb=DsPP^3zP_ zDDrpr!XB#taDxHtaI)L){w&;%wQ$eMlhAY*Z&BT!b$8hZey3NQ!5m~D6+zO{wzz{2(lfA<2!|7eIhRM+JLHrBa|{^e_5w6&;A&i`ta zngbj7^R%ZeO9ndg{+zABn3uMo4oNs*_z{eC(ZApZ4|omgp0IM79UtFJ4!?By<)NDc zGCL1Xz!yAaGJIKu#n-}C^F;>qg@+7FY0oGv@Pc)GGw0!SzYpT~LgoS+7}vu3?Q;PpC}%q^`jdu8%R1=4#o1rgqsjK6x4ok| z%VyWa)V%b>Z>|p*G)a;%9Ygp+EOsY&w(2`9lm1Enh5^W2ia!BpQesOa{c2*S7`5`) zAUnq2&P@S8c;E_dAV?>6)QxL&ZX`0>iG9+s{TK`jin=xy=ajEvqm3-c~c_u)`4w-9KXKO;4X*Vu+&?Pb#cHJFOB?=RJ6rk z65VeTkHKmTE!dJ4E14J<7m}b?S|Huc|&~A`7_qImR9OCT-Bm;!LKu=!>Vpksa@d@{10L?8MJHA3%TJdVsYCd zS+n&l{x-ls58{Gu8KG(~CIn;|fyF~WX-|SD&GL3b=)Pp=MSsloCfTs`UZid=s#~u$ ziMECrY^jDs-z$+=`fP{~rv-1jv&J*8xbhp^z06Y+y!BSQ-h>7kV?zfFb;$_G8n{Md z8XE4(qnN{f;HdEB5{}NRc0Y&DrY|o{6hlH{h_Vl|rzY>pxjp19`Agt$*aaeX-ugfy z!@#`!ojH7Gt-cN13_!^AWRq>c0l#2B9l_h6YD&VBfVtA&e*s^)i@CIX=?DH-ssd`x z9pF)yt!@UfoONfc=SWMXgl#x5@t-*V4G7uxdW~Jc^ZNjphS-sPPK4rd9Lc3>l6V4M~_ZHj(cHG0I>B^+k z5&jMP;JRWr*|{1LllY9$1TY zZ!2}FhIS+XGSbehviOS<{*2IY;lsAwnX^kxn?5ILy+lD%KbP8|uU%GmUTa|tk}wi~ z zjBd{y(pQ23BO`zX3+62}3}WT@FIoi%$YNvQVx?+JbTaM?#4p55bU6z|7+WyiaQ|V` z`Fc;2R)2t-_9648ow_pv2$<8s0E5UaSY*@~D)+KD$eeRxT8Bk!Vi0VaQ~FB#OuRqQ z!^?GHL<3WS8%KZ21m2(#i@XV-5M19IW{is57=Yac))Kod6o6QOUDzl22BDuA+S_g~Rs(2n-bWuruFc zGffb`{^a$`EoS-lBuFDXz(6XmW-sJ4!n{6(RQ{HoU(bz*o1G`LVM%7%@TIU*kdnQP zHRrFjHS1Qsy`q!;5dOZVdRY-G!m%^J&kGsKS+O^7uR^&%`RqGT^aS@umRQ%I7VfA z%Ex-5>6hrY>FI!xwXrCm4~Yjstb#wZ2wQ2v7zQ%O%4fDG8GboHMF7U@VPBq5$l@Oy z*c2!G`i1EdUh&y3ceTj89mU99w1Zs62mo-2ZHuqxvE!(yF*ru9xaUtP7DkoO*=o_;C-np*_059~2(V6z67t^49Mb^QjbWweMZL*b*>c@>$E2?C%X|Gm7zQJW)^2 z5zK_ohOfNah$Jos!HBG>a*=-%sGc#8%G;TIILkp|BLF@4A7!aqGR`@j 
z=50H+{N^6G0E6u@G&W?8!{ob4Vh9-0m$@9;wgV ze7YIKZ$Rf|<>P6R9#S&o2V=3JaGn|W!I{ApKq3$eiy6*4&ahzW&6bAu#ah{%@zTsC!jR0W52&~1M>ES{j5)K67F4SYh%0Lq^ zU?r4TkDGO#T%9Vhhupu?5w9(4`EJdhp*>J4%0O92B8McO`Swzdy4f_s4#QiDaz~tb z?YH)xJSCZ#Zhl$V#$VGiPN*^ECqMR*CHeyfERZ)Jm}(e#@e>2|9GV}IUP4(oRMe8K ziB%GC#+e=8BGGh?l|4*P8=(2pE5y9zhEv#PgUW6<%*(lv_e3kYFVUiyNdh^vC{F+w z+>j0QW(+VuV}||{;o=_of5M@!Ba^=PmcwJJ5 zYo!Cuq_us?za;!1=_&9N2F+jSaKB&eIPy0)*2Oyn^VZx9T}nI|>APTwffD$8U=@J4 z@FyG851B1A_T;z?D!&%__G27$TggyDh6Ub{ZTu-H0;LQi$;|C8CpQVtl2L|TnfY0Y z-g{2sWJSlD2>Qu(_NVuTG6{AYc&|`3hr$D|-s~KT>bunSk!dgk|(Ec>=eS zcbOt&lA$B#)`_gyzc&?j+8PXV`R6G@s8FV^0L_HkCM>O1fe9mb+V$YBjA9qz*TTvl z2{ZiYeT}T!uaYEB%3r8?ex0nI&*vI{M*Bmqs|z!(>^Tsdj?5y0K`yxx1`MGfE&KOU ze_2(fi!kEPgYi_zHVCuEm;XZiOMmg@2^u-sd6EPJ&Ol%XOdU zjjRZ4z!_QL3OI0ConBj#{*?z|j?I_w)Ivh95n$iA`DIe=5mlWnnP4HsB@3rF?E~Mu zKj59W-O_`x;M{k%k&$&%CW1)qFfE~Q=t>13%!Zo3oY}M;HkdqDfhYFw_lU8g@vn;2 z-Se}{7-3-nx6qqeSZ)D;%x&Q^_^k@O7BwGyd{2Oe%+I(w{AQTd%lxPE-IV_FVHO3S zqbU1fEVz-52zoF!*4o@yi$4nC9oMd^V>Yj65M%b><5|MF9!O*#`u)p{P8)>huEm@G zAov4*3=A-F17Lk23$Kt+o)=W?Ef(smmsdqt+ zp#s04ZQ=vdIu;Q@E;?i9Wyp*$@HpUgV?kwYeXWO&@LJ>rDKp5T?hHA9yb;V(q zbY$x#ZHoQawRo4)$(}=iWFP{zA}l2o{R*fbe*%BH***Y1)wD8@bSZ7hW#P=aah2!jPgPzosWo5RC+jLZ|#xrf& zolA{3*GaOoZ}i=?3r^l@K9Q}woJ?TqjN4$yKsX1sCxT1hpE_)*`gPi&A_p4GhS;dD zP6Bif#-;$x9u*8hk7qkp07PsoMY|R>D+k7Gie8t2!HVCr1A=us`vB`*U8(`} zMBsJ$eQ0TlhDHiPHF}sRVfyCROD*}|=1FMDKM1BSuy?_Uh{yw|nN{@6vIN{?VVPlCA(cYUk!=m1mBj-QY;4MupY<00N4fO|{s5qFGRK5uVF{oyf>@{v zF-wS-{w3!xOQd&T^J^imjriJyUn3Yi-CJu{2N3)Gxd8~+jtX1?^kUpX+~w96Le8DFhPRMT4gd*on1Z zfy=50c0H|sJl)X|Ab>T4tyAY1@?_)5{?k^afmpM5VWW<-W%i!Mc#t#&md?1VqagrT zq2@dbv!aIKUot>XrF}ojD;&+l07S1p{nZAw0xzwTT;va6=sIKt-&gFPynC4asc8*d ze#Qa2u?*KA*nWK8i{`=F3`1Yor!i4u=h=6=q_p>F;@-f8>Y}Mb*|-7h>XWq7x=Pw| zR*J{5SOc&YBtHw5DMuKf8;f=IvQ42dwRsisdiG&1?YTQ0#JrT80J8>70K0Pd>B@xn z^A8AWe%FF;MF0+u3g^ujM&wCtSsH(ChpL#pGxikhezbzTHgk_tDXtAI|47rN$EmuB za;3WlJ_hl26mefF@+#jCxWa#BYT0mkDi-A$ua57 z2og5}goAgcxhj9+hL6rto3fy4001BWNklkoUf34vRfnwd#_!g%cSJ?~3Q3_C`SlN4 
z`U{Zu9!9w_MrBLx1k_3i5hN}r32qBU9%9Smy`A*;g-_?Q?5N;xZj4$2dI^u6`Ni=~ zeg21U3|Zn01bpKU<8PwkmfY?f^Tx+;@P2$P=vC{Lf!L`f;6oo%J)6*N_ z!}a3X!LbK|?tt%dQ^RQcDjGR5-hK~xWOA*Z6T`SvbkyVhf3kqfC zF1`imTGC&8kXMh1c}>bIAJ$Q4 zXHc5>sSYpKMH&6p>yTj!c<=`u38%_tZ+hV&07h=hz~zt3<^`Bt_4CA&zuKd@Lp)vf zuPx!Td-wD7hylxngpE_3sWe<$#PUP`;DjmW;k4P^U(gIjP-eGnlEjQ9+@v$`vEpw2H%rc%Mw zDmti{@-u_TfAJ&FXBa?Km!#K;w(96GDgmufY{Lv6&0@+5YRW&K%zrU{_VONaw^ur4 zCLbpKlZ8`dfW$ur*p76KW{3bHSiKp+fH`zv14_PKd3jX2VsO6o0>FlC;FGpLl2g0Vp~wSa0f0Nok-Eb+J_LwGTp0nhk^Z)x z5{E;?*(bhas8={6IX;-qBs!SQ*dQH@Ajw7E^`1H0f?FNk;W3K93+EKZ5}@vnw=WT% z;B@Hg%&5F19)lBePaDl;O(mTfuriCKgcT5BSE5mvil~Nw6CagN^E_S}w(CjZ%f>VX zdOle7r5Sbgc1=JLzr7Ox!hR6{p)qF zMSX79iw&=r0$kt*7wx89FaR|NCH9?mGU;E{4r??Y{1&LYB1RmRK2^u2LX8Q$au2QkMK$Jzs=@ke!L`q zxC9iKOa8{DIc^oQjoiUJA_HDZu;)0n)`E*LT=?sY=okO+{*krQH7bLH^p_;{^VMns zDxtxmB0RuCqghZ|&m30u_Q5V$|E07;d*CMfpU|i_L-l|RQ6FY#`o6gVvknFTh6Dte zY3{@cX~;ApH-mMETyMwhgZ@xOm`(xNgWHq&56E`>*8ILTdJ^1{)b9BnVL>NI61d&; z3yU=lB^jTmcnF8!jWNQO7w6Z;3i!%%07thlDEN-Uv=B;u_@um7{Qdmck=now-mt$| zroSYL*|9;^ioE-|k?lThI&&gT`!fvt{lYJNo0hhue7=saAZ7~E++;#Q@QDCa=ch+j zW2ytf7-o#x&+Dg5&q<-!L*?cNzc_E-JmnM{3@}7(<1N1*qxM)fsGwo0ZMv?^m#h0f zU`4-beoKus4lK2Ses_S`Z@BGn2^kiQZpTLi*(>S>&q`x8@Q2fd4^*p+t3|gEG_gX{`8X z1H>v%aMds@`!b#Qjk-gKovx{&?EN009Qp+Wg36ynZKK`5ryysJowm`tU{+6PY!n?7NCx zCOU=mqxSPU3-g5(&P0w|O&N8HKb06zD z@9~-mvNnfvFDB*Fb@8nAwFRA{_MlY(Mnwjacq1^_(qVPl~ zXl$s%3;pzGKOMu=(24!#^Ai#cmh{L60NBJ;rvWm+#vNRF8UroBreT7t9`40D;5liV zJfF(@W}+#eXBS{F))&tRatcz)*eV8?8G_~jB7a#iF;x{tVqL2%Qx<@Qm3@_QRbuN! 
z;;)I6xd5ouB4?it9pLDaPNmhT*a+24vuAk2A}q6RCuvnv5?Ab4sG%bD9v4wKl#e3P zKQa9~9WkZjB&MtNg5ysq;%AJXDe&9E6rw0?VnOSygTi)@i)3cf;en6qAjvO9yk8tJ zo6WLbq z*DV-kXzthxK~p{;A6cdY?(|P*7vOoUi&9-ylyqf5zG`=m6xR6QFMhopz)MH#`{P%q z{5KqI9mcUiMvz9rf1=JwF-tp<^M5j+v~1=}2+p9bMi10U?D465G(^1N%(rB4sQqYD z;aFe$tqub2b2N;EZD3)zVImmU|3}%oWJ#{{NMi;#v%c0q3y_BTN7B*$7uOXI(*Q6y zNJiemGJ;9M;ds9s9dBBUZCHr^22@btV%|02{7U>2r+2z)KP+fVUxie!eN(`_ zobe%1rG3Hd*TGUm)W#?XtL1j|-`WzrH*!9R=Rqy63uL=|-yggnpPD^6n#iJ)Pr?}$ z4kb`!S#0eU5;&KGQ3nf{4-a7C`t5@Kq6VHzx)Wqewg4(ziw(;B2eMD99AOIGB>92Y z#QL(gjrj3Xd{PYT2LStKxZ#(e&_rZk&ht&Y!WikYg#mEVHun!KU^V|dEC7b9b%cOp z%a+PN?nWeQ0mLs(%oH?ueR98;1Ll#UDS#hE(E6E0dO4A%br?YBV%AMYofK-Cvk71n zVpSv0NYFJizl{mpVSX`#{O+eHjsbc9k-GDOuniAJ5P%1;(w?iI35ykDr@&a*j2hJo zw)iBB-~5=zH3Og&ArE2uxXqM7aUybgpk0>912qU}?~*ahPv4!=*;`g2oO%&&GYTqBM{&sYSMTH;c{M1r7^l8MoH`kv@?eA z73S|d#koIyzlQ>AZ!!Sr@r04K84DmBS;kt40rf&c66Q+BQ8|D&g-kfU5&Q#kj{nLd zxqD!9Bifh{QDx|J5fSm}aAJTKrpZ9nm7x|+0-eO3Ie?bd6LjLqB=fy4qo7PudON>_ zV1fSyuPUSoV^?aM+Nnr5!c6oey{_YMN&(1)e8l}H>hTNqAKM@W&%s9xcD{z1=LcEj z<77oYgIrku^z35U1#WBq1A)$;02p&22?g&s)hyjpE5)_Xyw)HpohS1Jb~;Po3_O zLPw=<$=XEb2Ef3ws-r(4AVneGY0$5}*S8(v3GUwD=Su-kUO?3mSWh+$q}5jVM|dDs zokB!}RV&P~pzl>E6Z3d6#f1F6BbgM64yBD7^k2mIyW!CI2mAwsMw(|2Fc3vcp`Fa3 zgLRM@svU2Q=S>J7X2kzM0T7S2|1TT${xEgGMm7-|v7$KILNRsu0oBw9V|nmvjH3BX zLiQ)`CA@zH@G6d&3XI>}8X8gu6B%!a54ccMxKXaK15xQFQ#Vj0v2j&WxIvi*JiYz0 ziO#tU-oGEq7-H0_bS!&`9(5rqzXN+YOFpEYHZPCer9^8NkEKl=3q+F^wl zxM}^qb$K>}voh}%q?hkyu~K^>-CnCYwq4F|H`6u)KNNrs%K1X&--PLqN>q*0zKi9! 
zUDb=seNzQNWwWi0g(;N)%Q5vehA&$&{8SU*8@BuBKT7|;@_6X7B&HlYq=oai4P>A^ zs|i42S)B$Gf4PuWQXwJFcCA8`Sr0w#`wtuO8|05w*WK9Rl&QU@RO)!=Mm3IXz+vry zooxeyzPL*Obp|+dstxue(0?{5_?-(VEeRa@p+x6{S=yCxI((;VK_;1!U8nx*g zP8CfT5!g4#e--h+?gB+Q{pIx9rwqG4&~TQJB%d|JSB|MS_wvG)F3<7LQo!m1cnI`2 zy1!D=eI?b3qU0*HfY1=coa80mG}1rJ6K%@;l-?yGkGb)h)fb}&_-BStMjskql=N7_ zSM1-3iQ|H4%H5Y^IenuRl3hG0iLx%GsuVMv6&3F_N&E|Suw2*CzfFU$c>Z0OQUrAV zLINWTB<&T|Q(kY6e}*_81yIQOn+b5$#Ub|iVbVgu&*g?{Gi@$1-y<^kYV zx zE0+ZUc#dB!n#m^Z&T6Fmi5iHqMhx9={weUPhWY>H5VtpM=LUUA+u+NMu0$Eaw&0(be8 zt-vfpD1-hV^0NV+U(Kc6ccd0H;zgd09|FMF6ZoVh(ltF`&yS zd7abV?Db~C-NgpK%31Xvjt_I^1BiNK+fc3cxdCB>YwXD<18~I}q%p`%c+FU-BqZxl z?A(Bd0^qj|cwqm#FXk1*Lv~|gU^5B%F1C0x z!*3V7mPX0Me!UFztQs=PHbdz!LuauYWDu*1g~EOeFxXO0hPc;cB6N82H?ZxSNGcw9 zMxt@g51-LMvz$rg2dRvA_`(jX9lS~iSeMbjviv{K1Go#I3FNE`dgJ^beyv z1${Ju_@ccp>8oA{F`|<-lq*}^z;U%Ss3>vB-Qk-NlkzbQ49er zlux-x1OA@K?e+UG#IG(FU-TXe6kL2_kPQLl z0DcCB{u%mz!~S3chDl6X^(0haZYyDHB;;v-{EmH!0kigdy0IqKaqXjo${ zWJ>%DzW#pCAsB0X?*$vGGl?+ae^eq)KcHV}Ld* zgw@#sYnZ2Ie`iLLtOF5X9}Y~E2Wi>MRz|}?$!u$3} z`_u$DS~|5FB0 zSmsm!=U&WVI%eGLLuN_F-_pQ+x)2n+vTkd5bz?jv4#&D0$cFi=`Fs=dhf-YsaH7u@ zYq1c`leVh2uneP>iJ9;uwVI=O&NDdpf2gZ}CHVJORnsk??T}OB1~+!d4=Kk59v^wq zVWIqe=D_lv-N%zo0PgVPTMqJp_IE7$xl&rs#DPrtJC(Wg1yaZo>7X0iUkO`^#X(xZ zGz!I(j~vqBJj3qK>%I6*pFQ3aW;CiGc-Rnq*RdkjU!VqPmaa-0q#XK8yYBREN~_$G zz{Cjz^=tC;OlJK%VRZ@ssf$e%y+TnN62 zq33&^Es)^iF-2?SJfPVgRsWM?+j2W{x@<}%s~K7*53(`x^P-IhVjE!GiR!@uM#-zgIEdXT-n&t-CW;_m_xdG-a!k71*1 ziYO(9I+VfkJhL^ARAiO0#)}|&(G&X*2NSR#_-rcb`>gFhCr~2RmoN?rwkN43apXe(9XckKd|lZpWC2+ zNL)4=GQJE$6>{YHBw}8V4*|ngSp+Jw_k-iX*sGH~#(UmBoX^JK0({$r+tO(F9L9JP z;spb;Hj!6HTQUhu08s-)Yy=)wz)WSJ4dvG|Q2q}6ZvMlE6nw~zWt5uv#@NH$#6oT9$}=#Hhipx zisX0Ct;ndWDN@Wj0TRPys zHBHa}z=;*75$Vr2l-f%r&kN=@J=StqSGF4goIqCnsiM?*2&YHX=o5xH09GoNDrW7x zL<*o4Sb4hl=%;;)P7JK^{_WQtc_4v+Cc^XXL@L7wg-$-6Wg1CE*?O({Df|Ne2!YNV zOh2RFTz7aLudhOXT<^%D!n(cR_YHu7hFv8ac;ve)1CTnb#6eEtGshLx7Ly8i-$lHS zRlIHLe-=SgutJa3H+SH5g`zATA-S2gd>M*Jrp_qPaKBOzO%IZ|krTC+NlOn4`yWM+ 
z4H5d#;B~)Bt%5q$g^>fgqvwbKNq@U=QIXpY0sZHfWY+`rZ!hKr?sFOXnM&0vVy4lA z5g;t+5y%nERkVamnTmSuJGMqKR<8JGJpHRt@Ywu+Ao|}pBnKLbQoO8z)L|Q}d`-IT zAcI-R9!337JD975q}kYbgP(UeCn@(@n!(=YGj6wXA-`}_xXj>BP06h=ThNdbn^F^O zjm8bsal0UCj`DfuSk|9Aoiwzw#^YfDy^;J2=FLw_3Pn1Xt{`K9k>bh$eu`Z?mNAj^qcyPgVSmJNIJ@{^Ngn*XXkwZV55OkaZ@KfEV$-h!@7u5u z#HM3gL}nUEGQBOG)y*vHHXV!FcGS|ZGi)*$&j(lktIP6$R@a;Q)d6MJLAR>swhA1> z-;8whnCqQGm&m`ie&C;x!yx_)Y1p8?3oEDl_|F^hBAzmQ0f>PsH=5b$>}d{GWmcXJ z6JhuO891ThTp!C>;5>!3?_@!@0#c#U;vmX&kXzd)5!Ui_<6U7rVg9C^K}=bbTfkQz>-5I zyDZ9~RJ$@aQ5n>&MzC~+L-2pFmJ~1x{uro!0Q1&iH!buH=5L~@Oc$BI{c>yqj*Uo* zGtcv*j;N~Wknm3+3&}Hceu3=~;F>T`h4&Vrwf4*gRZ*A~>On2J$-ts~ zUp=Dy8v>y&&aw@!6ZE$`-06f`W&WUCP7y_o%pT-55inL_*bfnBuC9T#GA z!%^{C1UyGVqfGqllF75PYWj$6`HRlW3-;FcS+oX-WM9keVvP_w_$$nXj`Zz5+X zlL4#I9mpfI3jhafV-bFx9L58fH)VenG%rx=%~N^`A4O)>H|Qu2x2#cGFo11{IvF_2 zfy(@cB|KS1=_7M~cHNmCjOvDi_O8qD9UBJ(J)H|5;QQrxEPo!Pp(7#L~l| z{AaZPvIXhK3>q@8qmrN*M?;X%V2id&x1A7QmGAbRS>xW5$2Ye zE5&VdOzLP~?1?WHPu(tr6>M-EQ`Tr4lu4|5erY%!q%rc4vCRBN-kc~#R zt^&e6)bsfH!1h<0?!BTr13M~^Wd=`{rh~Z*mg-_A0La)@rcw347h*dEG2wp*&^;hD z=0L2M`OrK&AkxgF!X_;@#Mnh1CQl;PHKsK_q8FZ#IpH+M_fuAl8 zUP|H(`R}Vp1(v!BvX`ryA?ma%YRr0&=YgG}%BMHxMyD*pYpd8$<`4#!)grR!9#zRqlrAqj= z=ISTk2Dk%sL><50)$^GH;7`b_NFs4zDvZm{&PaTT2|;+p%c^y-CKKv?5BYT+8XYv4 zZ*maXHKR0X}J+_z`Txz{|VGM`>_mAgYr~%R;Eh! zY}I6ptBlT#001BWNklFT{ZH{|RqPASwUt2%HP1|OFFTWTtIPEWCH1Kl! 
zMET2JleIFE?p{V{ic#zIS2W`5+3QIoCNuz5RoX!ImeoNMO(Gh{i#+uLS;oYo3v!d& z`7taa44;6}wwcjabrR-%Z4voRZNwUb;TqDn6Ej~^bj}U?SLr!E@wsIcUfYP|)d;|& zs2&>7W=LNrld4K$cQR=XLxkS@c#mPCzTtfTI;8-<|Lm={h*c6Mot142Mr`oPVFn>O zP8$U>JeLUj>l}#pt3m1z=wng83H`?>D$Q=OR5l$qR+g^PLx(@H;y+LT=(JX@kwdaU z6_2XQuc&Ts_yPap$~)LNY~Hu8Sg6t&m8{Mj7-$tWWr|q!wdFAwGX(T_VyoMhon46d zRLI(DC=Caz*;a|xU%=EfX#t=ru(Lu?>!H% zq}RhfowYh8BYO>n&2yoXk(PW!gnQ68{RcsMfE$;^=E_P42ZCJSuC#Cppwv?>K$Yb~ zeJ9|fgzpgQ187FYlUqF${C41n&RbDALBMNHh1tja4~VdB>*6MiVcV8-6PHS%jmV>q zeVJH)DgXyeV*87Q%-qmYzfBOXy=qZ{`j+pEF%B3}b!0xct#ndV&CoHP~g+O}0<>?N7Hf)UPZO;g;R}HPkvh?M*(7 zO5!xxcM-w5KXm4|9tL&3*4f#!I?hde!T_BWFvSZMetttnqljDnG8^QyB z+$v-qOh8@VYr!4tTTJ#Jx8U&=kC^gW2EZc+HDt<;CetbCM{ji_Sfd>;W%j5k@H;RM z%sdFD7dU9o$CX5qdAVQNDoE+Ns_3?y%z=d85O&~4<^U>|KScZS+& zWq~z=W#qg@N3qptp4|7mo(5#-sXE}rP1GK+O$K!{FmbyPAFKuY8?+z0xHrS$&a@&( zGPsQ4a5CJy3gaxD0Ecl|q11HY z{dlwUhm+`&bM0gHH@M)7v2A^-`=2X3^+XUWPwh11wD-EFOM&Ia|Hzl>=kh=<+i-wX zfTeZw&}vkB?*VusziwYT%s$|U=Hw4NgUu`TXR;BqXegPu+Av|^NJikq~xQr;0@1j@Z+B^ zu1{s5w)~6=qVG@oFDnIYBP%CjbqKC=kZWjMzF>~Ax){rxS3Z{TxZ8bc)S+=^8R(CS zaP-n@tWG8;)p`Y$a>#?ZaCMtivN1spy+&!uDAlX2Oo4tgJ^P!kSYmj=lg$)XC!fLr z1o)w^g%7b7KeUSBeji}kSqqmX>-d@ z*}`!(&TxAZSJa4< z4^xME=_c5c0*-MmaD{#4rinI4i>q{R*-A7fQltZ_R8!=8SXXJN`Xq|Z$xnh+Wk|yY|*{#&U)(J$Dp9m0+3K55Y5$$%=Z19zqlOL$XJs_Pdms z7mws4QD?~o8QdS|53yHrutk;@Uo{RA6g~C>@?_oQf@-N143D(L6pMC;q$O!zui?|T zwVW-uebQCMeP$Lwta2ZNWxL7#x+l+4d-WXI4gs0!2>5LYKLgrFdhdbu-?m_UlALlR zug-L3Rh4B{FC9u%@1*HrQdGqScG=bWzOAf$qQ@L*VAk`RfR!vY34ONYCDI5*H%?jV z*eauJn_6Nh6)q#AC>zUjoFhDNAvWll(}OGQZ4M}e0u1;wAff;vV_esMi~-RAXf0^T zCB0#H_@Uv>Jg7*|2Oh)+?u_M&VC0!#SXC1gPFS5un=;V6cFzpvcC}7TP-LEw4l5;( zJh8gvY15i%-gd1KO?y&W!hbGFrH6GB3p3KmwGqoM#wWlA?PAt@0Wz50VS>m2q8Lh@daB zWy7kiKJde?m*XXqPxMn#6>-(#rWle$Zr{Aw5(F9EiNmv~gx@y&v-nfxsw*d-r=5f_ z_}b*+vL;s=K}(2MEx~?h`~Ys@N8^Gh`v7Gbm@K)Lr!f}N0!z#;i6iU5$G(&fHqt(J z{lq7y6a_uId9rj~Hz=E^h`PP4HA1-k=y!yqRcT8=PeFFK@oEF^dhP)Hyk8A{APm^B zOL~x<;!WH$!l>sBfwDMzxR6qt~K;K8~=ti~BD>Q9{EJK$uJ3#;43x 
zv1FNya~jYc{wqT$1~YEJ4ch?gBL&%l)r_=}nj|H60Et&GwVEra+Tp+vItTa<;IK~# zm9@#8mOqw}7YTO|!B{~eXp5;Dk^xM-d6mtjMD!MrM0))#2|CjK4upIgvZW!nZb7O0?AJ#6V~R0Uo#~COFyFDSgr5;yP7*(ezW``vE`!Y*|4Bbq z33Ir0{x#!lPyCxeiaY&SE=*MNhCi*0jw?73Ve2o$;eR9i=d#!vHA53*V0pGBv6>C7 zau{ByEc!|K6T(g@t&GeI{=Yd8HSw&8W5pV$JW|l52{P&5s-Q|7N)Y}r{CBv43w0o& zX9yIrn1`mC&LlBl!B^UDsv={7&27hGNF*oC6ge5`ahXXORK&wzv|Eb1V{R2yYtF4W znnFMz0{Dxya?Fn*hz(nefPLa~<@ByP;UmOCsldDx*q=lrEYo%2!f@OLJdAMa!_3=7 z1#eqo+A-;pRpFEDT1E&f@ zh^(L8{hNEUl|RsjQi@}>@}C^9yVd4^RoP)x?95nL;Lbo|me(8clgpP5JMqI*09*;O zj!%$?$ZasV0^eytY{1JiC_?u(4RkoR>3ClN75Z-DBC?E66_y4IvrS)!jcfB`0I@eN z^1~FA;mn#28sTOl@1*4g!{JgepP3hJ!Ed!`DZ%kExC|@Gc9sMN1%KX)L#v@fx|C!8 zfp1`eL~ObPT>Q{hw#SrDd8L>=XtO@_E#WQ?s06*=;Xi_M;$KqLWP(u(|=N4dqYZiihkhFV zG&MNzZzdz1^2rpKvjB{~&8}d;BcoZxRT=s5?4hZVN2d76fj`OM(*DZVEILm`OO$y& z>EDruwWnt#IW+K3;UA5E`J?q~n3?+rafO9hR4mbR5v8)j0~u6l-?9FQe=+FCL_ZBD z2K=I*5-m|7#PLd(*K>o|ds7miwRcizqy$Ug;SSs60;?i`hAWT8_TXPf2Y+O)EZGN1 zY$w9m@Z;D$X*BRE!!7FryTkxOL3LkO8z)9?sS}e1WaYtzhueauB}a0@S5}CG(J1;c z(kye=D7O}7y!L<}pr7gEt=Kxrl*Dz}$vVv_dh0N~mzi)&My8cisWCdScbF1fWfd9C zjG&tw%7B+H>^>wzhJd!*r?edk=_urFv7dWmnuaV@fGOyiGq|YTki_ZE#_||+l9rTv z0I4YgKotUk0rS1-X56v-@`^Wd17Wk0!sN4E00iBoJi*GO5iA1!eu^ zl9hhTGS*s=Y*iA#hAkM4I#M?WK-+~6u{Po&1Lk$&(5C<}2Ru2^t~7Qk5H9#p>M})S z$$gp63{+zTI_%>zjb*ML%pan2{8_Kj){}uBlo~0OHt1(c zfRsNR@I(H+d@_WSAFPF7a2tU|#HNlg&NIIeOXg!KLSG8uMuVE>sv)fG+tmIU#B*<} zh(=(Q{4B4f(+sK1^H&3bJ;%zHO3<&F01jBdBlrQ9xdR_%gBeLber4rvjf8W!!xnNH z=zy36b4=v|UO|SG^$zv5%PlB##-W)?jc92tR}MheAH5fq6}NO-jK;;kTq{dkoU}fl zah>&>RSEZn^nW`#m{#gShd`&`Ub&^#zRUS41GN?jqhH~0mCWc&lEcPlX)UJg zn5;%1xIR_xW2@ZJ*tAPF2g=be(O(3ALGUwmc~a7nw;}wOP%3f<^4WWLCb};jsXAEaR{!!HeZJ9G6_$^k)?Ip zIW#51O8y)NEEUB^qUXdweQb}(#wTMsK}#vSngph z&`#VuAo28oKPfjyHL{e3Z=l+FOuCcAb!k(r7ywE+rtLseY$C}G1O9YcW2k`+eCa5- zir*a@f5LAH${jIV#(W8&K#)$WrjdEDunLh#@qi&KiWkz@x11t(?3hYola^dz z$ofWC*nv1cS@N(+ys}LmHRBZiO)|9IHw8&E?ozp;tRmjv#V*(cl8TZ+ymR7c+)ixt2@zgd@c9z78{!Q?s;YW=aJE_53ePB6khlPv_CIww9H@Zl}Rm_n? 
z(JNY*Ojr2|O` zrC!PjPW&&Rag{6ZxN@J%j*aR+>Z zEA3~4)ieOfF>3>;@&AB-L(3ySg1_k}@w{b&9vj-qCwR~g}-S;vjafTmJzgAD?>k7GZ*V8;;*@>9!@YU6AVSrB?-V)wVssvui*HAe=D1Iw+xOG zAjl|C8;df}t+5KDS#VwW7>vGPc@9IGsPJG|yRNMOOu?%rqdE|Iz;A6uTca10&)#KD z5dJaEHzf&*o&>+2Z@GC zMU>nx`uV5?O9p}P`HlWDw8b&s@=d4sKWh7Ur&s_a#7%2Xw#A@Qd zV{s7ttRSj9@l;Xwz#jsG@Cdj^PQxW_1HP7yNNP1u!s%qH;l>dC zVDK+c7E+Ty!%y*V-BkP=f*);duyVpDP|;ivKxVSjpeOy4@gU}y8Us84i)|+2F{r&H zWYf9$H%VkW4cL`>q8l=Puq5Os{7noA+ZO#Kh~-9Uv)B~a+BXF%VIS@b3I8T7fQ6MU zN=D^XN&#HV@C-XF2!PwGrpW+SB~1QBh61r95EFp1Hs%VNTqJx3UyEo=Uzq%WsAm2& zMORXOHEHmwIs34q1FpnfDj3-sWohJ%eX+dDW^a`GHu-Le%@~S}&8~_$9MZQMN1SAM zR*^jL;aavg-GCi#j>RCH*;`0NHA5w zp(Zl|`T38A<5 z>d|2X{**uI1Z2$9noWje$%b(g{d26bg!S1t1>$loQ3*RB{b$R*!i-+U1r(UA1FrI) z6nty=o3T>sU#&G;3B+@y#=jSM1IB+M0X zv5XQhwn5Yn6aLYfT{WPT*$Z?cF}IAmpzKpa+2BGRO!2G6|H&bFCT7sRZVyRXVGJFB zO(8!M`FTX44gA|Nxr&mdp=G%MA=}Mx5(X~#f$ELM)WHM)=spqvUM@VybBJ;;p$)#M zM#qq1a4f6Y?slZEm3AT!EXgkFA{8Xr#;SY*Fz7##nfQl1#Ss1{{Ku{?PxBtd70mU9 z=>#OT=bq=-K65h64#U|B`D7o3cQ84rJTpZ{MgO_{(6q{3L(GbPIN+Dif5gyPCd=Ib zvL=}9R3>(sEX+xRCrQ-!hwv})*qg~_hxWY!P-PAPge&Vt- zAJhIgqq~waCE%mDw!pl&wqs>dCNXugqv+i)ZFoF2LiSgAokC;SX?vjoi)D)S^5ia+ z?U4U^LP>_Scw%2p17X4KEp^#D?OfZds5@cZz@?!%HiF9)f2c=ibPQS760#%mD|3X3 z=Mw%~d?F1z&AU1lX#P_?t|VZj+h{z}q-Q&(5L@AOHkHC1hle|I=W^{)k*0ou3SfdCsA?Q|wRJt@=6R`?KaAFMSe4NN1{DCtMK9aO+N!mtGf}Nt}{OQuGldm+=F$9MKAQW%zC2KiBRk>#}kHvgHlO_NVmm zP5~$Q8-&JxA}H?ok3o~eS{m?gK$ePuq+OezmHP%IfrWg)xKaWXv;G5>lp9VTC3q!4AF8fV3 zv7m^A3WO{TZ3@#~wKVIg+e9t{{weySt;ZP!!uTY4Of2sp%(5aVq{9HJpjOD#5Ki{@a!!N5D!!tNcE0}@fPFGzDVb&GxWpgh;<+5n_6-KW+ z8>&78Bgm8TXs37V6-&Gl9^t?8_QHSOFZ|GlB#UU0_k~0y-xxHPAuHd@WhCL`jMfnG zb(f5WdzGpj2Rv1O!f(R=8oCX4{GaMuAJGY|q9|utpmv^QtkRC0TuQ|8!wgLkR+Vun z2aC~It_E6aV93*yw(uIExD1#*ZXG$nsJ=ZDW*+!w1AKrV`(lAqugocs3RAHSAS_7! znsN{Ieaa<^A^sKwd$h(Qgws=(beZo|M^uRjcE=XDr(46Xa?0Dmd_+K2t$l~ z_}7%}cfk+Q|G{1sq=-Xu{!;4A2)Mz&f;kS|b!I^RxmRVQ74_vb zP^)_XPHog!F(qqC{B^Ri+{WsZ8&578I*E&;ae3Mqj`WEkM)`cBL%*tmiN!HF5t4G? 
zu6#h11D71(&NA&g{0SACMzwHxMg{+^@~4$oYqGBx7v3lQN%4v#7R5)hmBFeq_@d0r z_{NcY9em@}r_h=?2-Sz~NdXkSedIfpnT ze?s(A$(0sO-Ef-8vw={SRq{ptnBW({-DLZXBT{u$WD^$w@}r2R=Un3IH@&8tC?B0B z_#p|>7Dh;rnG){}zk13><Ha731ABiTZH zyjZ-6%w<1Tcuq+!(@u7(vz&l}1-kG%#_QD!=voI6*b2hTjBeQ3Zvk^va&}}kIlH=f zRREW?p1xRKxx1vD?9SDd#|IASQ<)40yTrIfPa}P@K^%F2p3O%vYe1rApa5iz*Q-XJ zf=XhPiaf#<94Bn_HYX}Gr4oA2PI%qn=lrVNz_tPI8!_PTnyD_91G93@0wD{!=*Q$K zYJQZ|wwFpfVs5|>(T|DrcfFVN*Y!{z13M`>U-nu^>v(OE=T>ZPhk0oAYUJt6M5%!% z{4Qx|H(JbJ`z3R1~c#PPdKgKILYkCbC^}&$~4XQWHqf+6SFDBkKLBH zt2V1U{4V^9p81gxsxw)sE|wC6MNtpCXR0(R9tK7Zc_epaHUl=`UlMJqCx(gs!!@W_ zLe|=UwW5c=eN>`#FWYd!GIBClJ9QY(M!d*Lcvs;oskl?7=<_}k|J|(K_(vu!hWtr# ztVmSzAXSSv;CFdiRriTvjmCfN+ymxey+OjQqf5)MOX1VjsvO2ePdq9%2e6!xcIV6X z_d~Cu45>R2K(m~5Qu)6k52@dW+?k(rhQ-eshSg@MI{Hp7_Mv4^5bI2su_hENpGz+l z+m6I(BjJErX3lYV+ll#3G~ZO)wE6rLV19_*dzN45EDJ&yBF8l+P)@f~I;0HDM5yHQ8$)SHkyXR*U-|#2ol?fYm>si92 z|09(u4U?ZlYS`r-zSauq<;+44GsU?2>t>;i)=Ef zkX=4zv0)*mG|5j4f!TW((5@KZa?o3n$lb?@yq3GqA%!&c;f8+*{yVns98s$;b&n{^ z*f5Nl3u{Z77Vs(Wi15Puhh17jD@U>(R4m<%#JK|$pd$voOH7RD=hxS8Vd@@%wHaor z#mf5BOr6rOsPK2*wYmWbI+nb|${nHdFu5aX!Pb%ipoRe$kNM0IL(I=^MAE0B_%EhJ z!tV@rV{8UJBD)==4h&g5uu_BPl7@+#Z% zIRF4407*naQ~*5bhd^k=WN9CB-Jy<}$AWcFZ^2KN?Ka^zV1wRyY}8!<^l3tgf2sLf z4j}A=r^_QM1F(7NMLbsUTZQ1V9vT2Hl^-PhkPawf$hFtdyLL*7{xwhm1lT$4Ue3UO z$DkkZOCY^+82N(;$(eH%Uq5BvWI_sJpd6 zn!O;0sP^%yY--i-BNMy>{?wr%{6p}wDvO5)l=wsC;dA>3^Qd*+bbf%~PZx6b4Jacr zSa({W5IIXW8E97QrrG*T_@Oe^L{vti%s}D4#}u=PLiu?^^F|S&Sry*#j{m3#?*rp6 z5}F@2@E>Hgo~#50=vfDES3V`~@Yg*Z1beuwOZH{eQ`3KgluWc$FO>LC3s(V$0ly6u z*z|J`q11)4J`8YJ6#dhMe<(^WJFM`Z4F@;#PV`f%8pob%{@tRViGPl8&=dc<_0zK- z!pN2Hu$tV@hQIAXIbOwp;-=$x$$KmQt$|T5{4;BIC3EFroWgW#;Oxb)4|Xug(6L_a zTNq$=Ez!>@z*S4ItKnnp>cM}_6VzY1@SpI<9sjBP+SE{>;=U0NrfwZTZPDTWN^scK zG?Q#LxX}*Xm)7pw3p#`$UzjF-z{vz4{b{Q8V0v~BD*5eoIKj{ZHvq4}!-wXhJDFRW z%hEBBjsE;WK$T~Cm!uonK_`=u4QS2iZEh$J@Y1~x3$%Gy`;wL)%jl%lS&xPPTww=! 
zqB{m1JZIc8C>}uRS{uTp)2sxEuK#Z_a6%S0{698gYB;vu0?#@rN|JP#JPb;L0NEGQ zo~!VGP-DqYG^l8L(9aG3GJ;Iv@J8`d`pgeqI;@BX%s~voKLkJS@E>6z*)OG_8m$>e zZsBg7Wob4qy0OOpiE9naf3qGE+1bpgP$4kjhseP0*1&C##lH~!EZ1&|{@ZfyG9bL@ zKT!p3q6qHW&-p9lx$C187%k(`4w~rSdq>2-iQW8a3;rUr7yd^k>EOTNZz1rMJyU?r z?}iR*kRh=CbC&tQf246={sXSOA0S3wspcs{3>ox=%}y!C7y=a?cxEu2QLl$^bAM}Ugr|z2cQyy5l-68y=TZ+<=A|Uv; z&AZl(deXfO{5LZb#KIQ*S^@1Ldj3_1m9Qwhd7X-J=qJDGFk1Off`7`>CbqHpg89zB zCj6`N=N9_sxXD4X{f_^!Us)&C@E@~MHiZrO(+u1Cpy7Pb|K#7s|D^wd-x(vZe->e1 zY5u9n><&K!KScj2m`w7=g{!tl56nQ$cXGjBk-oe9ocISuINApOzrKEfACA?mIA*Cpf~jOd(~N)TX>GT zwd|44+8`Sj^?@F+VTcSq%(b*lo#>XuluEF!fJ6MIC6HfjjgB%DmU^o1!D#8qX z1XwqlJ6gC@QORUfDwV3 z1U0blR8m-1TNeFL_#bsaFOBua{9UkGZLifyPyh!0tKiKPVMA5j<3&G~a))OxFyL?e zC(Iqc?ZEkqN?sP|kQ?)J)84^YTOj$9g-y7_uTTkq8U!@%ceH5o@4nN2Ps@qgjlnR- zX8Jc7ny*1W1RPlkQShVcv2--6p=yZHl^!S!B+y%a9^;9cPdng0CehPtk^M^Puo*eT zOL7LJ8GC;O|J5etfSy%$B4O%TOlB2>^osae6|1BKtJr>rP}95Ilsr11-N@nB*V~#9 zXGyLg{j4gi+Yh$E@IOE66%U(#8x}UMvH$xj^#PZ ztsu`rwB}!HW47g&;6H4vH#aBjs|7l_WBfD_Z3am{S@|(#pXI@{N&hBIyu~R=|6w&0 zd&-}r|6K1L^dF)hA^ZeR|3!~|*_1LJP4I{0PpG{UR=hyTA845z@~3_}s7OZBKg-Te zkB>sCpvh{gH)zxM`sG6dbb1qUNot$yI2Y>Epve7b(obz{ccV4&F6ncujP0=1%wQPw z@Wpm}d@|qr`@`hnFo{m@;E9#t$qq*dq~3seO~a3&0(;L6{O8E+|AFc7Z+cjSM>|vt zSXsxDjH(gI50Ybg%DQIa{|$dxPx18;*Ng|+Q5)MO(cCyB0R0wW_I539}oPuzx=MbdJqr4!>^)KK0s>xCz9JU^8x=7 zvv6=tG=tyyH&zbrk-?bcV>nKXAFBBB%5T5mA0Y=H|{@C(TI4y+pb_TZ)4N~UG8wTymUS5;j%FiojR>F2;m5QZcnA(h>&@UN*G+C;j51^ZLL z$AbL;mnB7=0fKMLKh)cI_;=DzE2lOaTVr96dZX(YtrDFN-Owg#k`4Y8w7$R}*fKb{ zv{{q>@mj)!zc0Sye~T+)(0}-iV0v?#;Pb=wKKa3<9U zGJNeIc~W%@7C7>DN?<=a1L9u{`o|A!rO-2THR$`m{F`{*{|@|SSI6LFY=+%rqE8z$ zX{#?7mMP>l&sbh<&(D$jwqE4o1J@7f|Io}FY*1^EF175LD!XD0?BfoH~`j}(WpuWS;;hFU^X9VMH&452+8c*$OuZ6s~ zfZy?N)y@EKXG`SKmff+DU--vZX1kRb@J}gb>n!I9`BxqAmGoPNu9ba0Ni$*Nwtgf_ z??_oZo@+Je&UY* zqf6Gw;o*7S2Ce(ScbU#SRp{J-eG zovnr;Mzm@g^(|oAR+yaF(w{>Jyy2g8h5H?T6{Nqwk86^kU+U(t4u)3%Y+>!f$Mh!3KBu zr}Jx_DSe0@ze&;#`1aC{6SK}IHc0$Xy#2C@f8K(B#D5a^YM(&c(PHREAtPk;z<9s_>9;Ku;Cfqyc>S91Di`0w;X3AGs7 
z_n}~>8G*Ly?78Y*_(b@mAEY1l7X7~gpP&2>;J1JuV?}P1zdOdFM<7|CFGm`ApT(g*MXl4T;PR$ z6}Lni1*^;iGi#j_{uWf3NYs zuorkCF7N^_>>uC)9s9}w62Nr~$(Zo-@Bi9EDvezIfxm%Vb*lrd-~ToIKVu0$_n#B~ z>nx3burJt!eSx_U2QI_{fqj)zmC`)0Lh%1z4S(}L@9_V|tq=IuKy|}E8TqlIOLzFu z&p!uvUZ7{l5#gl~lX$`btiPh4{}Y4e?S=n`vRtoKG*LA$6aF93{2yJ@>+(BOd+l8= z0ZIQC;wn4X7q%AxQtDyA6UlOx-^n7_!J5 z@uyEV#TJLs5u*RN0y-uS;1EFGhko5vcXJ4Fdrb|~JJ;9rzIG&0%dQ#{Bv>#2Ic~_X z9R#2n7#RhYb3ChxA}XnJ5*_p379pVD8=ZBqI0vAm}8Xw8R#?Q5u+ruc#bd)f>{}U)dH`Z{;KGiTkRDg6V`=y4~B(<8li!)cJuqa$b(FDEwRR zay$Ee_x=4&EjGS=Sh#Q4aNmo`*Vo~Q0)7DtdmU@nhP4h)+r8+=i8Y_z0w>0M{0=Ja z*Wh)H-#5s|Uf`uy*7=u?j%I!w)5jbaY?l+|X!*JGD7^tg$9sDt9z8^FtGxH9rHv1| zvSFjN&G*nT#PY)nIsK4^?YJaP1FHt@!*Km-B!#?q5yFAG*QEHZ6#~ZsFZ}f?u!EI)sLUbYSRZ&6ux&VrD-=(_9or4b{MGKXf;&lJC~21h zvvJM3acu8`FT}z}-f_ADmji}fXNy<)pqK4X49uh-lK|pfYhtYHAz3C_zwtl!#l|Wo zSKLjfgn!G&cWK}Z!zWS_-WzaaX#J>X}{A1w1t)Eu94+Y%tw+Lt}WLgO};Ow{^d=*0KdGx$ZN$gqxo0TcEZ}jNDbK=!$3T7qHkEF_ zX_54sxSvEn5stCIsSI3ebilC|EJ;Y7r8%x+$C<%5Jjc}M&xZeI;5PwJMHRn5{xJIH z9a;gmZ|e6IO*JrtQ8iCU6DSvI8@}NS7I23Vz5|}b-*)Jfn7n(CO;Mt^B8i4}$`|}X zc)i7Ml?;`#Bs&0*05wPYx>o1hIBFN5G0QLGvIYGFV`}AT_&>~lAO3XQFR(#f`5K0Q zWe7%|BNkRk&&4u5O@qgLr3W)b{{*!kHywgj!+4R?6hR!u>*H>+K92%!=Xfln3_H~w zf2!%m42}ywoZJ5pms<$pYYh{p)0zlTnCn-LH@)EWm_FbI28{fR6y?-n>)l$h z0azZ5=K*&(-~lIW@y~(0ftiuqNy=~Tom~A@OSN2>sxhzmsfDZ`{H2B$u7CIw!Xo$J9oI=ysw||w-lIreV0%L2?&E5GX!MS7*!U}f_>nq zz=;15s}zqVng)fG8vm7de9R3pD(hP9y+Vz2-#_>P~t z;X}}4JY~v3-XZBlP2M!Y)+=p}_WR%VzKG;3bPS&bP-XkuAc8N-?}L6n`-gqobj8+y zzv*Febc~1ESXvt#`@WB#yXBD-W%tecoDJ>ScZR559D^)2Y%GI2{4b0!zea{y!56NBy`Y-6GPM{_K|F95v8fyx86 zhr*vQOmgRaSN7PmHr%#^R8rOFi2mgB>fvS$j{uQF^UYwy2go@?OqT@?QXyG1lITnzlyV#qlt2Ni1MT;Q!RyfO zv@3fH`uZmLbw^0=TC<5472SBjV1&-$g5%m{J0k;$0V6MdHGrnvXx_yA%IQ!}=@|p_ zt$^D|e{8|DR(Gp0_Gx~3z>YZA7QExn$c(__^&si#ther=Ajcr8X+PnmjQ>FaKLka= z55RMvnRX!Mr|&~Q_*`+Zi9q0z72>K-8tJ_OyzdqU-K;xuw<46bhj8=%Ger{JO=p5327#;zEW{vsDv`h3^9$(zV(7WnD zVCC@PcE2_Z=>hj^S~+ikCxyRpJNdlLE1Eur7PDSzr?ABu3UD1ZgaN!S93fYniuZm?C}j3FZ^6aKasn!F$K zrzrMO)r{L5=1yy<7EYK~9df7{SD{P`z; 
zWwrG_0w5TykzmJ+nsbA*Y&&DNrHmcdL5Kl4katU($52bn*zfJ=H%ZbB_OzXQMXlak zTbV@l0WTevLTTsV)^^tcajgq{RVk+d%fqNn)VSmsK z?!QjpJBDa?)W4~`(UKhgpM)`k5JGn{L3)Wg^xTAc0;7rkQ?@FB9CH)etmcXsPvnUB zH%O1bQ8RE}He=|Wx8FlN2?+#k@NS+C%J_pbLQ`B7jwly*V^Q?uaAE;Y{M@w2q@GCtQ{qoa`XH^g9^!6d z$GFL4H#|kN1?%Wj!EdB9Z+WR=z@)$yA0gZaV5Cu4nu!YB*ELr$o%N62Us~5kVY8M& z!|==jaX}FDJnI}VG7ny`fq zfe+jf02!1Gr(?3NRJvKkL^$EAL(mTq|Cas_Ag8A__0ziNz~1B+uWy?>a6AAH5@pXN zCGm8ZnUx1%ais+@iI@W^lVdS#|NoD&H`|gV$Dzb7K#T0rgO+nN`UL&|_o>qf=m85a zN%N>0@9ZdZb6WrqJ3#VQ_e|nJVD&7OPdzFy{8_rpCq&)cEkYK{xx~W}TjEHLLG%ke znib&3Ap?H{xeewvV9oJL(tGGic21jh4juHNfw@(Z2SE0>lK8SjIu{~0pb~J=H~gdr z>i>=AzD)t$ynZklNVkqR+2-0E%1GQULt!Ki{DrE0kl=P6B2|F{xk_iS&L|K1zhOE6 zY<~%y7o-juxG(b^c+glpl(8+@u1nn9hh^K30pgAc8?hai)_iK6cqAjRziS<5-Qx^? zDT;Y~{~Dj&j&2p;W*(!*&O)T?VNp!7B-+f~o-p8_Lk-H6nf`JOJu7)-S)c|mz~sQG^vk^AEW&#MjN!QxvQm|*X}moEBC zW7lP{epjgxFP?~y_>=FKhP={=il=jk4nWYt;ETTnrD!e*U)JIvXnPIJb_{M;Q2$03 zjl~ORaExX#z#V{vIE4Ez`0)%OA3%KNK*v%X0y@OJ2XCIABBXV%bK=>a>sqw`Y$AJ~uSamj#JSavZ)lOUt#D^?g(?PefvvAyq zB~u2f4(3qci9U3L3Wh`klxQXTis=i>c)t#O1TJB-;iWzHBWlZn5}} z#4{WomQDrw|3QEu7+VS}>Xip?-yaS>P^dLVoU649NXA;KJjP_nR$7-?oD`^4dCV*M z;LrhasA8B`7S*skiN!p=@d>L@PT~G6N-p3z`NO?W%!iztc3=voBj)Rvh2!5@h=|}L zm#Y{6{$jxYgqq)$r)5rHqt&rmCr{XQV{vHucQp3d+nGRGl?|Js9Dl3Dt{436`g8>B z_{sa%W-}WXEy=yj?V&S;Ah#yz&OadSic2m$&Bl0O9(zV6^if(J7&Qvn3*^rdU}9%i zh&$&HT9vft=yXT>o7@)GnTJoY+EX^Itu)}48;?fkh67>_>p;u}iHT(I^N%eVmgW6A za@W;RSF=*?%+}2-2RHY}cTzt4?Hxv}H=<}9n@FbDIXn>+_fPK$MN6neM~y=P=p5(Pkr#c>d(A>ShKtx3E_N(|ps6G#=aB@BjcH07*naR2DP> z`HtV+KM}%jTQ;^IQYXYN{`uX8DLKwfbE9i|Pzp6Sz@W8*SPGtb_LaH2gzp*c=WT%h zH&QkUu7mWoCAW*v8TLlMb;ua~Z4sEo|FYv{OZ7A_0o4-`=Ac4&sbYpeHA@O4jYT3? 
zpm&m;yBRpLSk2pVjmX@jb>sg@h$G)0N>j~qBY~nN+#zjrv-+VJ9`HM8qXEyDvh@kb zt!XfX{x&^1_s}r20c)(=Jb-TWw91~Fx&xMC=mN>)&p^6y3Ml-46s-vUNdAIecW^ix zGh9`@hJ+1pKcpF{E3Ex7ls$4YxZ*MUJJx#)F>gW92JG?wkjeF(NNZfze-Cwu5}%{z zpke~^nH{w5hFK9nxDv3~Wm61d_^AqC*x`da2p6jwLgVy9@8u60Hnvz+ z*)}c$Pf}!0p796nz|7{iqJ9w6ePTR5^EKgehAT0wef8*cO*||e+7RsaZdm)ULFM3* ziLrDtwMKOQustT^PacdED~q^K&zh>Qtyyy#tz=8(qy9;CQ@GNj0mw3Jr5%={WOumX zY|S!AMZQTDu=g4ixzb@ z(G7md|LK6v&_B;=#33;_k;x!Ji~wvvlfQKRA;ygxdq4*__GppCb3~7)TZJ6W@9Fm- zN|PosDvpTP{|LnWr(A~6KIN02qmq}LQyjdY@}0OW+w<~1_+;^)POEg(QYYe==@I~t zXduh%+nUdJd@hCIzQN;fWM`>h7WbX?L1rM{tUwmIJhz{AkFHhsC4iZWyj4jQ!W>P#J!@vmT5Hu=g6 z&0A!WBE>lz8Q@|b{;;V&3>$XsQ%vPKA;TknSNJUWCFL)sUqnA^1TnS1lLI1eoG*)i z7!unn6Zd@$TYqh=ap7g#^=>W{ifL-XSpMOOL}&yt@IUz=uOaXQ4$oeWei28v>}a#t zVppU5bwRnaTp44DnMtKetgm*sjLM!rnmrN6Jb+@Go1m5~xD4jO3o*Hs&4^^!$_=W4$0XXw*d5CC8ie=R3rL!vdGfziAiXD zQG;dYOw5FlfdTt(KHg^1`tdXak&>G`xsT9`FkzY=6hFMt&XWJofVF*={on)t$!2rd z6b&upAJ1q~q8-R{4{u}y&}qa{@eFp_3lK*L;ES-FaRxC>vDz|DEiD}N&)x0_{BNu< zU^C_c49u*9x#x0kG2wBCo7}m2E&5?utqohah`)1mfz#XK?64tU9}3BnE-e& z3k(MYVzd&ZEnz#J7|D2`JVAhXgI%A)41Y7XM$~4JM`ap0qyu13_fPB|YTmU)%h)lz zhrr@L#rCqmW`=6GTU9dKWB=Oxz#$HsSf@v|94S4dunVOco_lK-g<2*~q9acAy8&R8 zT1x8Bt_SG%=TNn+Lwo%e0FsK%s}5|I+TxRw1HuV4#WR5#t|unu6@$n=swDuy_+G*g zm|yvT&&%dUwC4gEv>qBE z;v0e5j5IH@A-y%XOg^Z&_p}Cg&pj8Sh7qnbr{H!Q!1D(hxFWu^${zH)gRfbD1>{|U z{H)AdO7ASDj``R%lP&TGQfADPQrh`XGQiBHO~Gp*+{Fd%(!c9|N7Gj5r@sY%$YlG+1(>6Ow!IU0FyWDIGhoEh zK!Cu~fV#Pik`Zw{Rg^!Z_ZW#}DQZ*{VRzfV`~z7prUyuL9ih&|;RbVvnWuXf?gJdw zvLQuVWKIlYr3P4Z+BR@}?d=ezrf9J*e_t8P(JKJwPWXb_R&7b6T*P8!g?hdHuwDKsT zIv!2mZFtVczCk^FgY`;T6V1ta6{0NaYrAQO0rMJ?4=^ACOvocM=7O0I$25$b2;&1j z7k6aR->VOOf8|X|mI{~Bq~cf&yUqITtA$wPV4& zrx{m$^l?~?RoQ9vX3y0ok0vv&RocY-= znAqxZ&J6&`<_gRd-tGe8KJF7-KC@H5E|=+B>R)$E`f?Um9#DflndA_MWW#4?Tx}-b zntDiwD4v-fgj%gFMZA1K`<;ORaJ$w$Qhi{%s2tanYyJaF_o+Tn3+Ye@hMeUVMHuN& zKu%sc#CQ({7{nA{qW_^Kn_WIT$1fkD3kf3eBy+Rn^Mu!yfxNglOd{+ZtBq{I&ysZc zo=GWhCr%tP0r(;;x0eV_U*P^Fmh-O0k`bLT`34%rJe(L7KA&9!TI$N 
zeGue*x05m8%k~H)(>7&&)yM;*S$}X?zK7*0Eg{QQ3G$2Z*P!uQp{LokTjlF3pHQ^h#c;!E1 zAi7rHUg}|hm33(Ph7(Y?PrfRryQsRjn>;-kwIr$^q2Ec3jfmbT0I^~No1zY7p50%@ zE)jnL2mVw|yED25pV7+%H`n1tc?*ZmZ6e;Qa{onm{Tzjr@V-JIHo&$=GdzWj$>We& zx0e1vDQX2i$u^^G!0S`K4a8x0-|`oHq3tMqNM`%jrpG)z@9tw2Uw6o=-ll!IMzU3cm!g|#C@G-wpc_1=Dd0D$k23+l9>n01UXc59k70i-&2ZGQ z4amwb5Wbcss8NB;6r3+6zx`R6sSvKq-4U_Kxe*fATIj>Nl7txsn z&m_E1XF&CBr>Y{ES+Ba0CHP*zjZR?+hEoJ^#4H>my^eDsnOTC=*e>Mbb60|uByc7x zSliMJh}nN7P{;|iIhP@=nai4ct(L9CKbL>YHs^N%xW>kJzwFSGeax+IntC%H-PO}v zQ=tO)m{jW`)kD_%>9M-Jp*qfd5sYrMANw!1uhzmc<2#+Y9#PrmsKfsNADGRU=4Det zX?7j2$M~C~|teq z%av=Ji7$5R2AhVUIXPBw^Bu6m0JA*?#xaLs z`#uWc{g~gfnWLc961}6oeI!xyi7{aCk2Q5*WsY5<01zuV1}?Ah`s+L1h`+`_&RY*^ zzdrKINpTN-mhDFfe-0`-V-eY$Nq!`(@D_>SA5>`d3?&L6p1JSv{fz7xkQ9P!V})a_ zFCPDs?tfFmkSEsxVB6c^e$z^?qP}}7%xoJxqbGuC zx6UmK&IXwSkXsGCVib6|<4taD*GK%TK_G4(UjO`f2!I$Y+U~sR5<6-=fMO!jjmsKbyhl5PlgDF0f9f+qT!P!LDgKaQPtE|H2pc!5no8XzB;k( zg;^iVe&EDT$5YJ#hvCum+Td2r7eVKaz|_I&f#Km7|7rm0P~Z}NJG&zDGQjhCOg+f~ zOpKmFXRSHA84)2N;wsj-ay3vducu3q?BK{6!1ME~=q^A7t*H4l-|_89e<4q#&s|2(y@;gkF~F&S zH0L7Wi$1u0iW?LZ;st0*1ZRh+b~-5+K@6RI?r24W%9=~$(~)IKK&pA8`_P$8hc|Mw z_$#J#ey+D>5ZS7yBgnBEbkaed#u#qVM-#ndfcp>JvK>pKkKlUHMa<+kCi}M_rZ@+H zRc9mjHy)sZ0!^%+ys(gp+-qufyum!T_xUh z(>&){0*Dv2*fzDf9^-WKp~pM$fhf#iic&!UVAhDk8&Ay5DWF zf>V1k3d}3zV9CO0!9OVkHX3DX=0>HP9W^1TqoHQcnF=%;IV9UCn`4^J92wkTe;EU?G|hS3|WGgrRyZw~?71L4=n zUu2<2$_2T8YM%QgmokYSM5mNMcd1>nX)bMGHhfJdWCL(pva##C=Ype-o6bBnWnMK% zp-}AGC!NaP8!6Y8Z7OXJ+_o0b`M1ws;GQ3QzpT?cec#P^{iCCjC7$bpPWQ)0G>1AK z*ni6aNM;WYj^SbGSKjd>gM$4^2i=_4Z1#zK=j9yLDrl<`5^<;n1C6qA3b$26)p8~oqrsM35ZNI-;(Y=+BDD+b`fN~&= zieVf_`ab->Ya!H&0@Sj>%>VPWSS$qzsZkavQ(_*WWV$DKs{d^i{~RdXlKXocd|y^R zL>|}a?%?PMQVTDBLzN`k7mT&-G;^RU2(u?S?vTmb0{_lEuNc<8f=y-CY@m^~jyj7= z)F%ScD115Ke^=)Kyw(ERi(tVwKRW}V0`h+;)n4CA2>Cvtu)Pb#>w-`-S%%Mbz1Cyn zfvx>B3$$}QTe6#t4#MH>C(r+i)A1LjnGcg7F&OKFeDPpeD(xt~a?ou64%O2hC=>A) z1K@OFZ|g_wG^-%18tA>vd77ONb3ffB$!`*UAyGk;!KD7^qw8$ZXsC|$!g z7+WH$%@j%!QvC!k39#~?k%K-6dyK+25Be8Pnxo*IOX&er;`T!*%FXq@e!1bfib$1P 
z#Fhi6#a^T|Aa60jFUs^nM|^GYAeYRt)A)MO9N0s;ULE7vJ*-2QUh9wxq?DXQ;E=#K zvfs%1o<0F#F9f*~j(w{GVT%PZXD#YUUJ*Q?T?FQ5^GNU*yt?`C(EHLOzHKk;`*Gk_ zd#YHxB{C1;Nr=EPYh@M#ss&<@0cg4(tD}M5ch6vXJtipAt#@w*^6Ounrx{MAI>pKc zVre4e*37=OLVLv&O@9{VFP!nsA(9?~jED0iSK9RL*kJ_Y7$Khr2V|^V#uu9}V_bJVXdA!Nqzy2QF@R=IQ(Bt<_tT1+c z)frEIl*3)M3QdD*7;-&}qp|g@UFpBd#Wo7w-tRqP{pmY&yE_fE)Kn658?=`J&JB@v zoynTBU(6oj@#_Kq@s(yu95!rRBXT{%0ms_hsfW?sTpq#5T~(efnZ$#EFWUbXhyzA<_2DNWQHLDuwq55Gys()s1)e}%&sB$;_=yI@x(?53<+8tlK%VpjR6k<(|@bdON5^576WN z;vS9z7|#Pq5>Nr0S_`hHwwi=uy1Tj~Hni!5x!toT7lubS18@|6DV%=c7#^WJz`O~( z;1BSY>fgm})hRt|cOL24N&&_CE)M_X407(`z#A|idL6c^uQvi{$CUjqwvyv}INXst zLKE3vKw<;}qa3AH{XH9iQ=cf`O~~|OAM$_9OVrb(5MhnqpTi_ek}G@#7oTvzD(uZJ z6iWrlE4m!&?KSP$+{Go0SNIoqxw?-8X$k5Pbu`U95yjer03K~L8Yc|EgveIJ9v5Al z;C~K~-aMpVuqaE+GOIh~ui>a~{p5kVdqlIOBs-+;V-+Lfj3h8~2) zfe3xIE+55ci02xud0VwnZu4OLdeE!z;*I{d$}0E=h3~(^Ph#%AR-r*5QrisWl8!wS z_!aI0EUY{qm&{-Asbl3Qlq>z>#0dsm@mU*5$9lezNb%D9$Zbm2OyFSOb_( zq545*zQfOEpzSdBvF_pwHurhLVTR$rh!o0{;ck(Go_KT#z*8cDo!Sk<5eqY@uR#o3 zuZG+qO~?0tULG`%DHKQityPxOwrRQlt$>v1X_r0VZqwuQ3O3;=wEy{Q2>dF;9&s4l z?IW(R+E&G5?GM3YYc*3&zV(683$T;)7>L)e8~smb`8bfBNP@P(S;dj1IvuXLAxzK1 zc-~33m^3sbvSdbZh*-fuskiRj1 z9HjZY-sAKbR%w;a6;wAa5N#Zu);YvhZ8Kna&u6~S_TSlHX~(OkPkej!?F356^!mxM zcDK+9VieCB0Nsr+JuJaJ=51rwr}*uFGtq3s3btx4a2Iz~AU24}v9M&NG2J0!=2|+d zCQP4Od&?1?OCoFyl?JsbnwRCDnB&OSIBScrm2(}H8Zz4fM<9kY1 zUak$foz?u~81A-Kc4=A8?VQ?x75W<%a*_04Atv=V1K>anwflC{QL>|(ycj7+XL-)z zJqDByp}xW29~Md5)ur#^fIT((1%8dqPGvK_{L$+G4#W;fwWft;I^9Gnw5Q1YKfaQJ${nD-=u?{+5ZXkp_3(znS3d##MWnac2L|FkA`X+$R52~u6 znagqjxf^Y_LU$3K`B*Ms`WnUv|6l<6uar~S9}XcUw~P7+YVw1N$1cY-Qn>{TVE`LEg%omA+N@QBc~&yruT~lUa0Bg~kvLee zKg9wm190rkJB^P7pbj1Go(riZbqb9Pjbj^tM^^+O#5dT7q2cNHx8eUL#_tSN|0zFm zgFkVffWb{d+6{5BEGK_GBN9EU z^WKqZAM;U@!2ntcIdkX&#b&^C*FJ3$oQ8?V=-Vi~9Y}$|99O8vN;VC@w+(|Wlrt2Q z&fdzR|Es%mz}as-M&y<9w?3^ag^H!`B3;?@e32QvwlsLJ2vRGWGRD*eT*V#+e2-RTvtt=X~I?48WmXU)dd3@%w4vgx!;b*alv;>6c zUrRwa{bkp(5#yT7@6BYF0;<0~vCcr4I=uF6Oc9mXUkh-{Eic@Cz;d!@)_y(blKjwM 
z_vdh~WH+;*^SJJPfjn6su|4i3GY>u)Rz$m-8+?k^g#Jn&Cc+KBQV8tCMna>|VBFQz zZ{l$yUJqynZzU{%bdU*=TykTU6s!eVlY(Z;`rZZ%g&^KC56l>c!hO}G+YppTLronm zN^w;s^Fwt;JwtuY5&)}gNo7t36|_Ndu}eD0Ftjoa%19czKI@RiJvNLD&x1wIp0SFt(# zuK^?O8&ZFZ3iS1p>F%7nP<%fspU%n0T#dsr1r+d%I_S&TCA#r) z8ijv31OoA8fO`nyGvGsF^C?$&uA*a0@Z2g&(~e;5WkErpF0&4J3Ows@x&ZvCRLdWNw|4}OOMaugla6!1C$W<)7 zYQmO1xe(H!t$*x%&?%u@(qK4*@WplD=gC&_>6b?6N-IRX4 z3pF<)rTQVcTn^SOe=pq%#7@$v6qcLju2Bv(bJtvsl^hTD)Yy8x()K2DL;kHWLxx=3 zv@Y=L*}bfc+xi}QUtppFn+_MP6`2%kGH{D!d(cY2@F%Wk_vf`NcA&|WmSCZ~vAF*m z9(39Pv~SU~vWKrCd_ZzcPs2`3y?*^A2>wSikXpvs1C%dqkPymkDglTI7ZKQ7IZ^me+-Of>e;kUztkUfj& zsuTUT=O%R}SR$Zb+s%YfORTMyk}F$<@HKzVXBwCGguco6EAb1pprgFezii6tM=Y?D z^yxrp8L%_}EVM_Ec=!TriofCcjcMvQItp56A}grAb0+9!lMV&Cag4#(G z=4Kc!*~UlvF);t%WItT{mT@PkKh%)Fust8N0=-<(bx$Tghg1YvDmO(SB1lRh9v)jm zd!ultw*HOa6KIj=6dHC}zH_0Ivj?JZEdZj6CZ)TzzrHm=G;MESj9%07`;CQO;jgdX z#Eb6?XfwOsX2B`rT}QwL=`x8x)`OJG2Gk`hm*}|!Qwea)>^cfN z@C@EP1Uph4Z*yLQK9-318UXZ6W@;*W);6FATPTfzUdzoj#NP}uLwxF;fWhGyy;#0g z!Lhb5>#@k3V=#|b9ZG2)K!g1dz|8swGl6dDuip|8zp7Dnd&D!%mUDpPANOEYS9d2t zr9*gKMZDYJY{-9cihr?T|3Z6ObqNR%+Z)T8a}?v0x+n7M4!~!cu5mHo0X!~SJH9}5 ztFRl9zOs%ve&`4V$SH)iJalLx0FvusIdyO=3GLxG_&z9{2y)@luN7<` z?S?EjS<`fCMx=%BSy_~0+TD5zi6_68(^tI&jiK}D--6sFw53ktT;KJC^kv={NEY%sH3G@fUXqxn7@hwQCHHdJ>C;?J&74#?RR z#hbJ-U^gRuLn_4*owcfWy10wxoR=ayN&vDYwASOR6ol+lJ*r~2MEG~L`wIWBGVEv+ zO7YiX%5HVDt(e%=w~aYF0<)dHc?cSS1CduOj;HmFhXQrrmvU^(=u9p`?hB(cyoj6& zHlD@*2V=_@z;<>$iH{dJ--n zHzKzs{$DQw^MNuE+{5Kr&8&640x*N>mV<6G-mcl9Pn)ESEGyZ&viB=eTAH-!m$}f) z^;i3_3V4;)m4lWggGVmFiB)hRa=>}Z@2hHm6_fHo1Na6qoHp+qw9UHI0Bl`kxCq{_ zIAno8c8%97pWpm{C^6s8_1l);Q+H|hug9$DnJGl>V!G|tH~4*DMe+E&>YRfhdF5Jb zxyhD8SGYT(mBuoZ(anK=ARnl@LEKCxG}|E7a%!>^sL*0|?5Q0C+zio~IyQ|w zG5;3MZ)|k@6uL3u7)LnEo^e>#GODBz2vi6k3h3UNh}9(ncFS;R|7XAK6^rj*S*=9_ zvBFSsn6KqD6HSRh;CT$J4G-s+Mfy-2H&p-VzP+R4U|Z>McZG`m7YOFD5Fl~0*fL&f zF@3!vV7cIQ!t3txK=IXA@cwBCO0TCD$c%ZfUnF%Vlml`InEuC;k0unnVf3nG4IwaeA;?{23I68O*w81IqFIED zo&t%<@CwC)2LhK`XT<@<#LxkaGEZzGS+C5cE=z_SSet&;x>p3O_fu;w)rLd(#^Mei 
z>Td300>gNGdZez#$Tidg@W56SB2a}09udrfUz(W*;BK(ma>Dx&@y^OBk!M=%NwlG2 z($z#a5-hrlDQ+iE{{21;zFy0L)>KA=LvN|6C;(Qm8h*G{6+P_UEkZnb2^#Q{gIN!C7I@ur}WsNXB3J(VE0!mIz@kjQ>gb~|xEd7V`HsV7dtP29b9i89E ze(_RM4t>I1d`A1>DoLggSivt3M!Ez76;JIU01z{V)ZH9YGfGg0BdZA?UDV4@!v#dODnJ{7cQE-~^e$@>CS`6aVL=yz45;R+YT zUT&%>K%r}Mizw+m(wyj;0fs~yOocB2e`2ICQ~^dRb2Tv(t_Fus8Bex7h+{Ym9_?dR zu}#1)_%W2^GX1rY%+-;GI5PsE6;XkhFTZ*{EzU>m;-&YZVaAr-aX7@F<|a^sO<;uS zcPVXJEbIym111o`pYW1KV}Ze3UsNYx7tC$9S|6Q=1U5@YN-gr9G)T6~g&U`(+csUE zvTXniqP|0=$X)$@*<6*iIZG@ma5;n=Z>HjS+AYx}{Gv03?n^{=*S`zk3>Km;z{ohOLJ6LKep8pQS&do_!1^l$yn#J}H;FDM*W z-X>u`klpUo0*>hwIb7KP^S>C5@oZ~Jy~4vR^vjvsfW7{`A5M4`{EnJQF3_y=L26gp zN@|Vh#FZx+aU@4Z1X+wVi~$jZ=NmRMlxFuRNbzayUR^ZE3lj8N6fkv7T%3WqRL8LNE`ZBS9Vq5w`bQ8%)mTU&%nw8}ju~F`7c4X2mEiGOfW->_ zoLVOJW2)U{!^+c0*0uW8Z~Ak$l0iT40nF$<89L#^JZ-0HE`}0aGy%LQhC4(?Mro2v zBAuX0XD}?pTF4<3d7->9bx+d?X7-vjvhD**<3d*E7>`eLiA@yUEJj3|M}Qi9Ni4=X zm;Yf6$dbk#HSe>2e7olLFUULIZ%x!K<;}uf=)`9NR^uP}H`(n_qapiRK=yXQf#9uu zUe)e}P9KQFfFCT*3e8haUV$|naLCxYn2uK*4mI|S`8T`Oczq-Ofz*ZCYAMiOyVN>} zboV4{pfsok0|L08snk)X7lMbzAN!eC_<#Glo}SNGJCXP<><(W|91otT^*xT5CK}$e z>v9rwWIKYFcmpZ@78^HKH+hXrIWV=Hp|2>;J+f8fHa$Rlz1f|A4>R^C1!;+FJ2j*2bA-iYsnm z9tY9C&oq8wf)O-{d&huHlb{CN%|*q5Q8w#oxsh@}05@GMufrW9SJLYB@uPFU;FqFp zvP96IxXFqBM)dazC}HjcVi+Ufj7TrmG)=st$w?$_k3s~0W<@zoN-g;FGkirkC~t97 zq2VK#EK0V2y|H(~k41kkf@ID%7DJydKX5>MQ{ffVIe@4@Dn#((aPJ5VNY)Zc^2nim zh#3+#Bb07)vR&W2Rrl6`#hegFu9_1W6s22%m?EG0ZlyrqOceIwJSW_Qer;&^*WVTW zw}M9A0^}WG$0gKwtDX zNcdg@;wt8yj>^jRPXqvneo#eOCzOdoZ+=-PGcxmN|4xnGQc@w5Naf%9-hw}afVB>& zFLz*(DQ}7}MQ2aS0Nf)D=q3=0XsoPn(m&kP$Z;jZ6#PQ)XJ>%;(l^?AJjyb}%0z!B z`U}y&8vknqJ#E-9N9@67Q1@vNfm@g&uR53t1Ttjj{{|wLWQdZtIAmlNjB{~S*%AR{ zSouKGG%xri=R02XZ-$Z*jR_r&=_cy66_pNmgN~3P(Ml0E|9{Zz(Tc?TtFPG0qTx;! z+zyN&hA-~pN>~IN7ODLu6xdBhx(JyxMyA#+^S6M{(Dq@kqk=6WPx^&vUxEF(MWoCH zd3K_BYTo6?aEaRiV7wGmS+dr9)~D7vvPhQ(A`&T7u}D$C;bWx`@3nw##og%Wb-ss! 
z-G46BAJ4pP!`mCf0ZN`8x-we*(X8!0K2%{D3Hz+fboConVx)Y36AZ*kPf3x?y3|J^ z#hgiJ$Z(f*WW1K7C_ChyT@8dlq;5hRnu|*nidD;IM6yKF?;57*OQfbQ8}00#_n#DWYV>1C=5ekb@7wW5E(pF>uFrS;OV9imA*b+~3Pih}a8 zfQSz$OqbQnC-#3^D~Zo4x=6tv{6nejRj5G0Z$y75`g`XO`zm z2Dst3f*;k=7WB_$M2j+^L;E&(^d&F;Nrn`U$TCN5xc4I5#Qz8V%LcTQ0dbd^uZmx$ zIYtzN(#aP{=%VJIK*M(av9#6U$Bd6)NUdyH@YmHCJ<&HbHGk!^Cm+pzR1!iFhUD3;`Fdm!n8iDlX_7G0!Z%h_?` zwv>WDoYB-*w)~jLy=m0@#H))$1dmB8_FQ)GrL+FU?Bhy$pM5G(f(q~JDPxrkg}NiYD*x28Ia zED%UmgNMx5f^?@H_QD}t&FO9I6|h)5TC$5XY5+MvHN8F?R0;lSu<~dV3g0tbkw^DvH)ghj!U`fO7x@?rJcBm`MtsYnFmDPzm_k0Sx$!;BT$B;*u!B0=?U{ zSJ|eGpLoqXYt>1z+A`d~TEV}}h-ad0c%-sQ!MV+Bl+}9SSb>)mlNGk8!6@$b$aD!H znjOQWn_|2u<+js5IM8=z5U7-rU_N@h~Zh$^@9&ep-q_&p%!vV3o{`h9t(>-H&DI#Sh$aKMxoeayMDc_<4BOf8hxS>Xbz@gP z9oRoDs+9vpa~GPz?kM^*{I(dvnf@W)V$hi1ma+HfEEzIw_B0r2!-9uX%XDJ`X(-?A z*`isO2Uh2m6aAg&Z%zMZo^s!EGzxN!ewq>4!6q0nZgX|P59;O94)+RxLI>C(el5kR(KNd#!XjBLI*1k=;fp_+Um8yj4h`J(K0 zjHkHe=q7!$?-PJ{%t!DVPE=v5t=Pdpg0!{~X=r)oI@*sB!ISE{Bb8L6k$uTj3q{$L z38@VZ;r^+~r0FC!kZFiAez(j=u8yXxrk1YSxgS6_!{iciUWO|@H7^DNu%rm*&u@&Swf4F4>4aqST-#;rhJ%GsI@kCt5P=G&xykS^7_ ztY!gmOvQ-ErUHc=sY>pOCt!+drYim3@OwPJ=wDM8YjOj_kA;O~77i+8Nt|p_(ccOF zbVbFdWT7Udv4q@KDj?xsyj1Wn?}7N|rhn?$<{Z|dL4OAv!;{CnV&;;sMgNe>sK#mW zf4E4^KV_to|DEKw;t!uCzcE%ZloOftZPnlOm(@|W!yh`*Ts^X&c~gr^jnh+XQ^9ED z39F?$_&*-4#Q)RNfJfn>dzPF2PV~16ex|=M{bj))I4W{ygIFXn95@|_J_5mGWZa6w zp+$e)<#%?%tzd?`_?DVh*8aQxY52YM&ny23!yoIcxr}gy$ zuP(`Y?QF(L>q!z=j%2thbuLYZFB?N8G$|?);hR|dpp;SiEtT|Hqoz*qH~oVSLhz#%jR2Yz znzpPe8yR%PkjkrPBg$zILug3Qz7C01pFdxfZ-~X@lOvUy^BOA(+isBFPt?? 
zJ(cR93D&HC!_PT%3L+1ndGsK_FrPylKHR5^yT=S|DfpTH8(`FUS-DSAInRVaP-)M0 zEocz`1^wUn-?7tQTsj9OaIux@ERSW;-${PsgcAIbbI@vd3p;0(HDdljk)m`>1^twN zX!_G&(LrPHX>!B?mw{L!w673}p6C{Ez{;9ZxP#?}w2}(oD{H-wX3bOjr$(rTrZnk# z8bayqahTUH=>a>SY$H6xBC`>zm7w@MSGPgQZxdIM^)Ty+J{SGh-)?{Co&tg&%|GP~ zo%F9V0J0hz6`%s^!GKg@KA-22=bc(`d$q-47W^&lpX|FVnHQGKVzoU7It%_l zZPhShENVz^4>axYlVKjRM&fX>j9f*C{^fyg@QeCVknmY`$k7BxdavF|jI@I*2i@=& zyc_%uh`IA>J;nO4$Tm0}607S=FG>?`_sAzV>QWU~Ma&~<`lIwFcKQdzli>GKvT4%^ z{xdEP;#f8q9-RXp3XqWh{=DUfAfFC zZ(4$Ia8cw3U52}Q#9{e11r(!M0>wX3boGKiZ9u4`8my2lsUD&1I|;2+Q#ri}PW-=g zP=H^`sC@99|MNA~iED5eL@FJZ{NqB`?(mDI=bI+5=pSRNJNqqAs77P%Z4TYH2+6MMbgPg_?iCA|G401{_$P^_`yG914=p@{_rFq z<})CD&``(|!2sMN`IBmC#gD-URIzre9SKa-3# z2`tix!!ZF7dBUx6EX{~&T2%!qGbrS3iDqwWgv>?k%#CnOILMw#zv!O@iF1JVPs{Lx z#ALgok*2-0fFA|_QbU#h@uGh@o@oAYf*)xQ+`hvv;ZIbePZ)ksK*MIZ=-+*1;SEKN zJ|tBoQb6n8;QwG0SB&DHW^%9o2+fIq>!&KldIgv7^heP@hhAY?Z#?+FBx1qO0uMl* z=IS+$l|M9v!lk$7sw+j;IL=xPBM4|!lP|g6_#Z_7_S0iC)b>7a_ya<7arc4IZm;a{ zJMmA;uV4j^P5%g~v-zs2UrcD?dhrhkelHTC=wGqI#y=(2{O}s zQRtvq12WpD>)aNwXQC6Hew?`1_POL5e)}+9nX8Esn70QL?Il`c(u!s~*3?*8ELyYP zvbQzTrV}fZ9%42Ne(4El9u=xKMV_ilv7vII+`LPfkg-;@5yG3+YzSieZXvH|_OXBS7pFROmlvdVTPTQB=-&w4}gPdf)($!PeS z{zCMx2bM`=`1`LeLZu6pr>mO?>fu;OCeT}-ggi&mQfvNF{?xs2@NOpx;cWWjJN<9? 
z*FfB#K+g#<2zqXN6*y8&m#7E;%=thADgdfJ|FQKC}xpfMdX_2%I!mUwXM9`ugdgd`WMO zUigut(nUE>Zwj7R-J#wX%BST)%VHOyG7dmh5j?V%1V1u$17Nje$@0Q3u>Iw4t4<2| zTrgHkNfp7Q6-tSVmLuaRe+oSu1#?$HB8Z8f3jVId*jg2bb4!&!Vc(&2%n5(_37X;4 zqJ~!*Cm31^euwL_2UyJ3yd>)(3_k{@IDkp~!w*Szmj&ncw+(Q<#8F9p%|6>v0aJjcS9br;+>`nGIXM z-5PqYkuNW`G6`L-8x?ge15$dbDMqGogSP3Pza-_wT`(%~IlQe@s=ro2OaGwg-!#X1 zHpgKeUSO4E-3k7*17}NX(I2l%|3Cl$AOJ~3K~yzL2j|D%hyzw+8|4_Mm9Nt2Z-d$r zhcZ+sFEP4ZYrt3Z+_F&4bkg4m{#*YD(SPC6lwTtf5Q-!hIRHE=Q!%0gLjlP(64Im9 z5-?=Yrvs#L64uAjMrJ7_fxlB}MPU;uauGbacWV2gb z>U~=?>k{wsiBOBjS%uyKCc{sxQ?th8SIU=2?EO0lzwPw*=4)ije}l`Vns6+I>DxXL zmfMCM{^FqJVOx9C2mEX%NE5XK5kydxkH%VoqMCKZh|mq0YUmD)j`159hTpiv??n~Y1mCfFM3pWug%$toMx$!nu?v1b;6K8~`*WJN6aMT+(kfxG z7!>_OLNwMd3O@rPD@T_soKLPkborJ34sPFG^1D%C# zmbJW6rf(1ad8fbJ7J!&XmhL1Q;3)ck2>y2C?4rLHI=BFLK#0HO7vxWP@qZLialzmE z$C~~tPxOXJpQ(NUX*aIuAJ$7s5KxGGG}nDJ(w}i>*UY^0as|J%s22ULfxG0l;rDX* zzQT`f2mr?u0O^uU)JS}SheqNSmwhr^00vxOBbm%qrnY#9mHkKnYbNcYu6a%Y*R?_M zI|DHS&6lv@5|00PtWq@Ge$DE>nvi$l>fC@~=<&K&s@ zyui$VJI^1<&A+;|WWcWlzbSNdJ4cbX;E%rzf9hFMRoNzt4BKz`1EyEWednG!*2r!b z{jIpX>EAkZjf z{GU8Ox*aPo%Rp_wC70qb%}8SGs%i={*~qIV5d3Htl@(7TLM*X9rM0!-_Y&ctvy}nR z;-BCIX-%q7sYNVCqb9N*B1`}7@+$=YHcKwx{w0yve+&L_AcXW!^G|z4rm=BA@{iP& zTAIgq_)CdN8xrRKfWQcT5JoFLI24hbXO{IU{yF#`#sAGekVX8Uf9Ow?{xp@AZY}-U zZ0%RtiuF(bw0P)7Vf%R505Sjxv;vZjGdo0DerP(R^Rx3*X) zM79xW)l>9u7PtD9*`^tZ;lJr`jlm_QOK-}4l28f*=H=8k|F`+M;{en3Q(*m()cbh> zDQ?l)HHYXgJO2nB{DUQKz*jvi4X{?!+wOmu9}xUK1VSM~9Jnq9i;Jyw*$^-IrTB;8 z-wv2be&wa*`<73Jn5a*2q|=CiAuJwaW((}J;17ZFH~KUGGyI`a?6z13F7=d5$F>2h zICvEOt!!}X&lUlr5JyRVy-axA?g|+KflKzi-26jM^}k{tl3xd09+JH?erKgwXZWO& zU?}M(sSp7VFn~X?dNweCAIK0Jg~ zqCdOp=Z6(&`E1)U!5<|d9)uNifXhtPKI!7Cwt2-rzQgY$VjbBW-UH@uU&+n|{Oy6ZJhtB!uuVqoG6S3H zDcuC(f9&vk(ZAtOcQ6>Dq16lQm*XYBUQFU-fLjFxQ*og7*_n!wZTj1#f4tyd9(Q{x zM}7j0PkmnOflJZ9+&c&emsng?0To7&i0RR{2L3M_)1apqSeXhzrrsCKMH;!_;m*+e7fZ_)IfCgnB6*t zKP~iFXM+C0SZfIMg8%qxbWrd&|9E*lAngen5uwz?(v_XmD#`gqe;?Q~9s?MalhKQR zexW}${%QF6)vg=Qqcm&`R-@KVMm4T%w``Fa@&9c88Hvay2q4o7);KTpkC(Rl`Kg0N 
zv@-DCvfSoP7BOFmaMPbnNV-HN2vy7dF2%pne@aq74_}Q49t~!#OkI}ikQ#tg@bBF<VmKS`)Qm4ftIzPzin?l)E1n{IVmJMBl|gm6Zr8Fch*$hZt*F0xiK{bt5&7 zdcmLf68ttGTz=4hL4SXte*-L}{ZUC3HUrjzP4sWMZDvUKhkUE$It*g1IvC>(g?|k{ zI{~wl<176?MK0U_ z41bTs(yd1w>mO0r`DeSaV|wz{Y^T3Vn!0e;*k&B^dy<;?Uv~I~k@{bOe-iwM{5D~Y zluXnA%yf}-$RHQ$3;gU)zcA9jg#YpX{r~;sG>s&8>)HFQ{Z~#nAhAs;@Blyc?@t{( z^>5S#eV}#zrylYj*aQE8X$CSM=>h&j!SA0sV91rw z8HCQ*9%2gr$t@EyT+kd2&vn*i;_v_6JzBD((2zgHTFff4j}Ef|Q~N)4!2EN-|A%fL zgv$@+5AeVxBj|W_zz>K)CjS1P&G0QSH{0mWJjd5K_$mH*ihl-uWhOZ=V)!58C_f#-D`Ws@jPslJZgh z8nG{)1^pRvC6YcD*;!x@E7ctSLoolRPBxGX-p`KF4gUlD!4zOrJ4r|}#@~Qn{{9~z zwr5s(E)E|Ac^@508dz>@kG0Mkr8-9v-W3AqzVgF@ag2aEwkhHq2U7wE}n&cw^0>+5)52Usg|x z^Sb}a`gdHiciGE~LxXPw5DW~1Oo&4zma?4$RRceZ)P2xkTfy+n)#?kFf0}Iu0sJ~B zgZ_1MQPuWa`=IDADEJ%T!=5#fl}_@nva^k>>8}gCR|S3O2+$b;D0f#WO_@+M{IM&% z)Vtc5n6+2ck-Rn<3*DKg&om;>4zup#)jnb~+t`U;Bnlrnya$LF)LMSMW`cPQffoD^ zCN{LjWbjfJ!UNR_Dqafm#IN9cV`FYM%2Dv1nY5s<+5ExvO(_UE_W zWWE2h$Q`ekqk4O2NK60>%^k7vfvVMd%KZx6ze>q-f0TGdNGtIL4Dc}kH?It$ds$t3 z(q%9T`Ask}3|a%Aqv)R#y$2bdNn}@&;o~3Pj2X*Dvwk}rzB`8Ot&o>725e+L&}uXt z!?Sf)1ZtLgCUxjR&%@BR0ur(ZZLkk$kEp2a*&yyZ{x$qH3Lm1sk64&`0BX6cvSQ*# zYpDB098)snvXebNR{Po>^dCDqhxTFqztQFo+eOYYZxhGkfbA#*=X~}J zcZFbU3isbuvg{toJ_JHca`Bz%bZ67O(JYNlj}6 zHV+*%KlM7_v0%N#(WImY9oB26^~xJOhcLT9#XK8Q>U-Pv!C~oH!9c1^R(H#)JsO ziv9}X4BfTQ>xTzI>5LPRnK_vLPu;@#r9mE_0t1tq#|!>&-{QHkU`b2>ly+)B1V;_c zAb>lXVP$k6^-BC6rb7pfVnt@B0SLyxM}dKl!bLs_cFsM?M$Kvql*3TTn&r68FjE~4 z(1FMi8H3;)3&PFsVLescr2|tT10SUC9}bX0C&Y{YrGdF^SpRK|@^vJ;tEC3*e zW_!zcX>kvO)baol46COxQ|iVF(b?_lxzYyI34RTE@SjCM5ItM`Sh0fmzF)*BG>{)6 zZ{OYs_LzRi81QIH{MR9oe~X21^)EyUen153^I1~#YOW4rAVlhZ9ftz5fPaY8Y~7o$ zgjAiaES{w?0dJV|{jLqnJrV;b_PW@@fNpU?m~afn>}4`Qma87^^F6hkuz5P^z6 z!l@f`VFZL&i$Bi}7z=_C&x5$7U(qr?$n$6|)Ml?y%fC{IhXIJCE>e5!Sz}pDFKnIk zVl3au&4U5D@w2nnJs$j-L`CYeV-^>(bN*0mdVZ9qhxTeMnJu zE)u)P(N{k8caVFvX7xBc6^k$UJB1?f5mUm<>G`^k^g$~l!-++-#Ewl@n-KnI*YN!X z{t%UXh2dhcbZh9V5yAV4e<88A(7G3I?>f$4r@0b6u>;oB=<^^xk^LGkA_x=B#o-3k 
zaGz_!^pb&RJ^b(@^#XoZRSWq3+}Af{=7py2D6@r|ZQF7wT?4e}56Iv4?tC~?=^!JB z!$*C+xyuNiRzs}HnI?hA-?F$UG#dt%y9v~0uE3DWd{!tRvEA7@h`2~S2nJ>|A?zmM z=p?^N)}1{(*1V`&TC$ie34Tj6pjC+(8`(*TZBIVVvp!*AJalXi4T}<=l%4&QusE0v zP>dD{LJu@XSe`D_6e zsceP&pG_n1Sl%SQeZ!{ptDZfa3nSgyuQ;yX)Ilyp1%oJLB~L`~L=i^)&$^Mvl|gC` zf?Y7|7h`8@BO5#}1X-Q6xVA1>DO5+B;q*Xz;SSX>0QR${R@kmYqgNCGoJ+=V~k^e+c@AA1A^QFjHO^AD+G^Xs{0WTMe=0JN&25E;XOs zxnoR=!31!3tK~i%qDz_n**GCSoRU7m6n1o&|NCH)e~OA-8j(Z0(OOh%aBY|Oc$K@0 zN~R5!Tps560~?VU8Qu1Nw1Y@{81-_RHaj!+XE6nDeRMF# zsOL1fJ34N?GbuO9CO4G6J91M(SQ@TDVyx*wfG8^$;W&utxP&!z`-~l_P4Wdk;KMG0ID68#9O28{N!l_z%bf>-4Z63_%$;R}3k+^> z^`L*aJdr~AB>c~!kp}B3is_sOyAjCHoz?5vYTN86yZa-#FT?5qDC9B z0?#?D2ehr$b%*xGu`h1Rfl?xefUy2LtTMB!0}k;X+vxm>YRrfts`^vwz6xtkX;3GT z>sAv^oS-`-{j`JsTUE_UP&@?r{rok*R;*+e<%A;kH0~j&ddvc>HC?lMkMc1Vja~ci zoJC}R&dW|TlYowu9$2O<)Yu2LMQ%Vmmr9H!vK)y18HPU@2;e2jH(_A-u`BwP zkg={$bXc*r0q#fjD6qjmNz2?q`-H{J22hJ&v>8yS`QwKYIS6tXr{lw}Zre87hsNke zrx(dZib99dk=1rZS3--UvUiQYMe&L=+#)y z@nR2`4u1uByx`vo>`eKChRhWV02@mq&E7nt^eDz!$#-VWr8KMp0Ge*xDjfkMvOvly z=s>l`PHwl1Jx*o}p}!5k5Mi<3*wP)huzBMPuvD-r{z_&bxDuHuXw9Y7SPw{3_3)u> z57C-k{i1)nZj*rQfv^g%W$!~fFkL=(I{uahOaYb0f873m+TJzUa-7!=Tp)L5T}Ov| z4n6-7`pYAte}DiN0BToWJFLh$r7ck;-Y-%VwxwB(*({eV=*Ti%s1`=&r$tYTC=vq4 z9P%Q@vhQE3di@8mS)*-+KIVZ{Kl@uP;~oA%q(_E-7#};EPi$JgbOX+W1(Z&=;5N>fMiOw%a4X&I~7>z#55PV?=WvY7~ z$wYs8CHEKp=3oNyD0!?S4@xX!f3PxyPnJWF+0v~u!`#4xCo!>p!WJWb_QSCgWQg$~ zUHvrfgcxvw|B&z(AHZP{^G>`$tmt54?RyQHN%8v0pp|pKP7R$OA0z-A3rmK7Wn3Z) zd`L3uY=~I+=WHDh(~AvTNM!2H1q+fh~kGX zDlv}N4#l7|_&fwN=7AT+(BDArJc0(HZo{gSLo;l_2wq1MQPD%yi@0ud_&L?819qS= z_8&{X#Jw*t^mCSRJH3^DDTJ+L0kp(HaL`AFk2O}wF+@Lu=mlW!EoG+Pq{GaR49vBs zQa2BP-y0O&Ur|c~jm+J`0Lfy+{&{3e8gj-lW)G#qXZ!%a)osxd*f;D)yn(a$E9@vL;E znGs5;ar6*nNO7nZiBK>_TyMD=v$E4I%mn~E(4kl!K!gJT28sfgBTPJBVGS={ElwV1 zvbtLSv>057%fqky_$#1t#+AKyvE^R20Q5M34A$)k5h`*HlBmG3IBA|X>vU!x#!(k2 zss(ION6f(t4B)diBA#OVmTU?@Pucf4%YcF5lng+q4?C^EWo2}fp5h)ti`gh8&|Pzzau&jk z`O$xzXGVVioUvaxvJ%mdWE?TgH=G@s=I9PJLc12=9~4__F9-77pf>$?3XK1P3RKUB&VmbJ 
z&5Pk35bT53r^?uz~T%Gk||8qO1T) zfP(%84k73uBNfWY)Cctdw?xDHi5eCKKB~G@hpz-3z#;<*jDh*1KU>5S3zWKROpUPO zAsH&zt*j*d-9nxjBbaypzs!JId2lUH2@r=8pz9$( z5$sHo`!J&)LNJIHA{cY6Ya_Roj$xnso`3ecFmqr&n5usoY1E@RLSX@bJVfL{@)OM- z5+R`gLNn!&i*;;9gJo`>1h5$G*_7H5v}FSW|M6A~KaSTfQW1e$Tx0{m0&C7^Ux%5qQi6ko+(t9Ds7%`MA_ES-1zu0|j0lb|?VPeZ7P= zq5s)1EA}Lko_NmxBj%P-N&wCB6MrM9{~_)a#erk`D_W+X`EaTf9xjobTZBphB>o%OcpVsm!TP$Iq zFe)tCa6)ajr>eI9$x?;Nl>H7fzw8fkw1vKYrPz9qTKdzqfWSaLEfNpZIJ_uy@Zrp) zSO(y)!~C*(v?`ZSg0eySwLe4)VB}O%8ao)xAE%9ff%>E`Ah4NDMU~uG#HR{};t`p9 zBbr}(Df%v91=vM}HPJB83adBi>3A+AktHqqeln58z00C`0`-g;z=QF@!k&lseIS16 zexLbwR;t7^PcHMs33Brg21enbFo**+I3py#jTP2x(Bq&55vB;fU#Kl2Fh(2-n!yq8 zWiydjP{L(CDOoa4CWS4US=!kYcO$@p>_lBMJOhsM~WGE~GO zPzxDie8bpbEd4_`-1wPQyWb8TrwISHY196?}}k%IRui z%`ZL&kS5`E?Cl^FzItqrN6{1nlrsrQT;YyIY>|unc+gVRVKpfVH6d}*6U__od3Nq) z?^6Baj12y_#R=Y**az{+0saW-)wF>nM)K4TQbUx#b5|5)y|=kV6vyJitI zx^w#ALZk?69@!@yOv$zcI$_-HqFI>P9v_M3iIxNS!I;MX4Zp3(8lOGiR74fF{M-BR zyDYUU$M2w9EY`?OVi(8jKA&tC6xh~Z!>H(vAZ!YI`b#lo7QSqO2x6f7Fi!feR%y`* zGInuDMm*{*DCzbVUFcE;WJ4}{Ds>3{(7>Dokub$DEJMY6fx5xX^t0R)>l4Uw&}7H5 z4Z9$)`Jv%qUQ|LO0`0)BgjW>T!)wkAtsR4|Fbn>W%@WtxsAX}47{(M^W(62K0<}gF z2blu@CTM@1$F|Jtb_wBz(Xtt~L`4s|{!mn-%VatIG6qiWX39>iMXS_onqRA#k(x9x! 
z#fGtLvpg60nQ_qrqYypsmAIlO1CSj69sFJFsR|T*X$(3t<(Vn$y?FL6GGMkvq2N7n zjLu^f`!oSD%^eG$y_b|{07ZM9c-wdBVT|he?z6Um1M5)Whmg@MD*+|@H|MUs2qu8( zoNiejF$W;qfQFYG84(_ZKbt97s**JUP5qSUUdrfk3wf%YLkR_bF#Ze62(~MlIxv1P z0h#!$y`vG+e-RdsBDyR903ZNKL_t)V5Mhoi7wb4A!_Q%IITm$XY(We%n%&-hbLp-- z+lxr%gCkHZLZ9OaxEW%TarI%GPO%S=hE7EK96N4%dB#$kLpn{i0rl51g8qxRPy$Pj zej=0$y&a8);dGx&?LS%1k)M)wcOxZG9sVxZ+EhCk3{n8c)NZ<9=Zd7i9P5Y!|I;S$ z7P%{R21QMYUj$n|@PO-T1X=TS2=SUd?0B65VMdAgF`#R|&86jzz?Cq-6@&8dmH&gpAMG z0g~nZO$Tx=qa=I9ZgMOZKOe7(D)5_%iO&&~@M!|bvPo1s3?PX~RM%v7<&ue-J<^Gf z_$M{t(6ZPx&CTrYuWYJ@%gCX=JIif6U@KW|3mUH>t_(AmIrT`-@nIX816bFAC^|2y zS+SsSVU#Ck*@StwuD9xQyVJQ~KA0UC+Bpb2x~#fb%7g-(q6{47bI22hDa;nUWrTYI zEa=#&BsMi|Gqvfnd5Z><26j1NRU?{nLc^^%reT}VCCa$)PmTcoJjIct`@C-(` zB18rdM>r4=C@%2owP~&)6yFh=$a19=3{6rsd-kEYAtZVcI!o=Nk1m~2On)kzSHE?9 zu+bvR@KseGvM2DmKM8j7ZDFrs_ZmiAfd^~L)mg*=3_SxN2XOG2ZQI{2p>wuxDv@#6 zA0Zet&+f9NEfTfzGK!$3Tc-g4b78DK2`~7ZZNOMg>7gR@(XnyjAwR=LI5d+Ln3sDa zpKI=0}&lOlIZ7|l5jiwKEJxY8QON;AeHh-bD`_8ge4CNKxk2iMjOp%GnfWaKl?M5$RlT68RGRV?;EE#teL6LHvt4DG_Y%UjaNzW!Po=G{aDj zpegz)SX}KXCH1O9?@bPoLvOiA9noKid$f;(0@4Be#|p4&7vg<#vq5kA6scTO4M#x= z{0yFC_=EC7cZ&lqmiJhFi0Y5VeQO#SK!HEHC=@IWc4uCjys698fjJEln&v!|BgZEjPL=bEJsKF+m!lKC*-}{7Up-;57$%@d`0A9o*WPKn;w?Bi1L4m= zN`?90v5B&$47f}ZJiJ<=AnZc{D5DnVKRAcy?;%oq}EGZ)qV$I0dZwDl$LB9(oZEpi2RP7}_*(-S1_xS_*2FSahp5Ssw7eXGSR1_t(q7PcMG zn}kRsN)a1srdZmD8OfzKU1eG4Ji3&&+-F!dZftdny=cqJ5T(5um^9IC&PDBgBy`Tk0Ptvr4W5`366xkWPb&P zG62DB&AJT>2)oSpBj|)o7}P}{3Jm`s)vyfmbJ@!!K);%vjtKx;BCUsZ@$^FiF|uUX z^a+2NGa{7-)Sp!$^f3%%F37+~Vc(;bChi=Bj2jB=r~Us&)%;Ub^9SkeqB-HR{v_&d z4^i$1_~oj$?ekg-GE8o47Jdw{gtEkxY`mv9{G4*_@$|&819JWw%~n2H?xaUF(U^dk zx@;-;aszY~$P8hjJQ51#!vuSRxyKM}&}QqV7>bDCQZBzuj{vHw5*t$5a)>ZJoz#oi zS)d8YQ7qfo+si2+i^;J+n|M@Tn^2Y<%=oQg!^wzcxc*_wV!}teB1EQQJ>#05b;hi) zya=NBbfJ?V3{uB@ZZk3B-$(V0`554?#s6YU+$g56b31U#-hw@7r zP*m66Qf+1-6XK{`D4ZmxNRa}RO@@VJ2mH_*36Ww7#XVtjeiKHI(*fJ#L)A8gmRLma z*(JSVchI0@OfV8*@5Y z@H z`->lEmr}JQeD0)bi&co@QbdMX5Vn;(MQzK 
zkDEKdlt?V#=Ryj@=O{2dc5vc?o2YOHy$vC2Z<7eHus5~#Aeza+5O(YNF7PT?Dq=o> zAc#n=_qULakl z2`Kn;`}!3lm2GClMd3b7`I=^evlfrW8r(cI37DsP;!Ciq11Rxk(Sf(~J(}>(rFDrn zd$3L-IJln9E#r2Qi*A6~k&&>YinCs)W^zA8Pd)P0b%7Mv?o{2Pe+r z_k11sz*-9IuL69=-b^Y0=IH%u4F2^T$AEUjt2flydK8XW709;aGc&Kc(vCp!fvR#` zX%s`=tde3^u@qR)$LF0R!HT>;@-3cw8@cje9Z%X+Yxpob71)yFA6b-42#z6Gc&?P+ zsw+0#Z=3QzsvvW-2)2x#wVPTL^}hE7`0LadP!x0BCN>Hir)zP|yv21}?>|hvxQD zlS_~s`InKff6;h)BDor1AhMP7xN>P}yLjxiM!E}r zDFavpB>!X2Oe73HLMSZ%q~!;`t9}bS8Q@+MVZ|u2rF1Q4#4C54sU7%NBF#9p34lEY zFQfD)S>sm&vNPhU#xa_&EQr3liuGu^i+(&J8s$jTvRoTG|Ny*N)-YJMnIJ#akvQ?PU}S&LxX*%#(6x z0ru;Jru4ClRmPF|t5p%QKT1{^*=^@%!w!Y;vn@h?j+?2Yz!`u~ERhL6B_KW0`K9(y zSb%oDUDVBpGl|`u0h$9YopG5_`C90&=qI*Q-cPGP@jaR-!KB^cwd%S@TlqO=x3)g$ zr@huLOSeZ{x`xjQ>NdwarYPa8kh3b^?90N2m41BU~ z@^b~g{8T_v3?X?hrck#)A!}EhtmG0K1p?S z3}7Upai^;VbmjayydKEK5#caBunWqUu$q?=Z>sE9kOYR5`U~Yu|7!`Nm>%Yi-mrK< zbKDGQK_^l{b+w;9jKeQ` z0;W_rk)M=z@CuI_<;R}j*V2UlN%m4*qvKncUIAW;p@2wymc%3Iz#``HIfuG(q@N?n z|J#DYkeor%{^ejlsDN*%pnFp$UZGs%ha+%~Gden1`0YjFP{RNt>i%C~?z574sK+I?p$P`is6?A1$NwlDu5r8cJ ztjeSrk50^fr5KX;X2aLj6iGxj@o$zvy(L$7=%o_ueJJhIY?@_pQ5;&x8^r4JpHZ-H z-^v!J19_;fxIXt_B97pQz)Dz7yR#umo5U_~Mo8Cn?9DL{zM^McA9GAz1hC&-N$_0g zbkQm-kDx!~vGG;N@$X!a^N6Cw2im~ySZFS)=!W0u&)?M=DFf*9A*E{}qV#~H8L{D9 z?@=FcZh@qzH_L`WyoAeh0P$$2FB+{^hUgCZ5@UlZe5?-fXuO$+=sR@~1#JqXfNSh8 z>Ova|9E!fx=K47*!%UJUN#?x4Kl0N&+S0xB2NpYfCE6XQn@Eyfn(c6&0`OoEYK+p`ai{b0kqLlu}L! z-_;OlAn#zQ7-MJ^UwLh`$CL7D6*@40AE2npyT$ z7c5Qv;;Z6{v|Grj=Qz7rxFB)>(M3ga#MuR=C`IAI0(_AjvYioX&hj5^uKpN+(?7${ z@o`b4ZXp@FFYuqI>YI8XRY%1jyqU|c+YW2w z!oA`h(AvS3tBQ-)J^`x{FE3&3?NCzPxm2Ia$y+LhJA%Uj=&i22(AJYTRytd9N}vw2 z-b&}7RFZ3S+Ja%0<7SCMlD`T0PeT42r(oD+DWx6@t=bb0pp^gv8QP&0hQeh@S?YJ z3zS}dimE_`qL$)g$BW*czF2d1^tNBPf`Rgrbgk39IU$BfT`QI`9>4`zo$E0C_R#FVy+H~4wP=riG zF#?jWEHWB=9HXI{_RX1;*~Jo?m3V

03m0W9-p;;K2 zs*=rRiJj5t)z6&;Hh8d93CnW6S{ns`B?bY%3l@P>$sn)xy=V8x4GR2ME$}#+xrh80 z#1R^V(?>rh1$$iRpBYGe%~Ws$wOm~INGdgrY;llHk`i>GGUDAP`cJyYln?R9a|nkz zyzCS=E;KD*0&q+{@)d@qD?7m--;;2<6eG!CaHl;0#mi48$nYy)l=N?p8~s0Ls=nwa zD5-q2lK3h>3iwQ|=y^#PJ{k)Ro1smys92q;;00At+4Tog;J$3&YHnEJuD(H0Whnm@ zn#V2HN818KnDr;qzt^>|y%U;zXh#?1p-cGC0sCTGGx3ojF1!*X#}+59;ZC^ibv3$D zEQu1tY!b4v4B=0L;ZGK;fs+>`VUp*%#IA{^C?d0bvKwAqk1>W#~d!w4bG` zr9&{Nidf=hkR{~Q{seKm2(t(FJsrJ-B2?SCQni?*YnyWArUmM&a#`&A;-&X1h5r6M z1`dS7R&tRx)B)c0uUxY3KARzz5Fte#>TlpjRWGrb3}FYx`p8w2>r{5@@tX<7hC@he z82+9`@KieT6}GE#902d18zTfKJ`D|?!QhtsY_pzl9X312u>;GxkS6RwV)^-600u;t z?i#2mFn=5VRVT!CNd!PBAIYrI`iyA9e8(Pql(ort~3Y;zbNf(jXnw^`Fg ze9_yNKv7vR06@j5$skB;u52%0w#rdEOJ77C!Q3)8XLXLN1dLu==;-i^7 zAd~h5i}ReWlfle93OlXm9k${av<-hZp~`TC{Jd#T`{%{Ak ziMcv}(?B&N!2F<-6#n~en#(;1QNMr7t~jSR$lq>tbLa+i?3FOPmW5n2jt-P=8Oj^1 zWv|x^0OzAfze)593Jgyv8sc`PkeaUS88I&{e1M=g~LBQ42aH(@UI*X zRK1PnRs}7eMM8^O3gj`@dWB!x2{Le!Y$#l2v~dB~pL;dLCd3w@PaF4>(J7LHmqH41 z%O-XEGme_%KcppOi3>zLg1?F9S{IbJe@~`rL=SK>6`1=RKz9I{EMtFE^8had_EmEk zHS4FAK+pF8v;h&!&`kHYyaw&r!UIoa!eRuhJq7ko6tI<`wvCHbq!&caDu`b%kmDvL zMYper7Zl20938mQAP!gAc!%>iGJzKm=j|<$0z;fL*|kkqzCoVVAcFd0cjBa{!1Tl} zs0_&d8qD;Ml85OJV8SY~4`B7s7eOl4u{B5w`rFez1qMzYx!>kgTSp+<0HMiGn78i3 z`m9VEsr~Ipk;d{O2$o9HXLX!6y>t#pXGn$E)yrC1QbQ#g^`C#^BT^O%!3_<_n09XKdc%c*fYe@3eXnK33Kz~E&&DOE?0CQ^L)DRa$ zR$WC?P=7IhuMmD`0D&8enmBd{5*8>xE_d&&zqKHCGnKpOT(89Sj*B*Ke`Nt<{KbiQ zTN-89ZWi$G3Q=Jj4X~g~p|duby`ukaO!=Kaj^H&vt&FSEzzhB&PcuL)@S0JuzcZu5 z714rDldH;VZD|HzhC5F8O?Pu6v^UICI-dX z$&N!z2Ahw(@D^DMaqso^Cb_<^uoJRM|IKJx0NL-;9i+Rm=V22EL}~iEWRSrKo@0cQIs`Ph zt`vHA;`c((K}Y!lD?(Y#0Km4zbxd42A_nRaH0XKF{-BTs`ElkQN53VwCYlzbcRhF& z+|ldduM_@iofkyZZ4L{(G^F}d1v2{EjlPgUemR6*iu4IEcEC +6EY7dr56a7qsy zDNVa}!#ghJ=yI;?IR-lb7MH6rk}MG1_$mTyzj#eiZprj1hE9NZ{X~{Vv-3*)1Y}`g zFJo0o*wFxs6jB=GozTcic{8J?j+*^#A;4L=VQ;VPcgmJ81NGTtp1yk%2+47r5<1#+ zm`@58enOGVL6=pGS&dkQqEt~Ae-Vsl=a82GA$J-xkOdLEWWlRlA>xF%Uz0$vGW)Ac z7Cr6LNFTX)f^4-V1Iz|715C|ff14q37ETiH?-5vm+#3JoBycxqF3QnODJd})KEiT0K&Jc` 
z4b;Kl(F;eejSCB2tP{W88447FTr??z<${PcSP&SX-qX}+2?cwF$GzbeC3e5X` z7F)@{vz6~g>I(K77Jf4H0{Kcd=yWY>L2QScwQofYU5M;a678hpbg7u0796DZxU-<_ z)P$zV{E)v-ATVb(S_T>LhZ0`R_cg?e5G06riyQc4_;~{I#S#HH%cyNCa!9H{cZ9lRc;1SI*pTE*{O=PlGB{F`aWzhJ?Y{(V?)v*;P$2Y1ja zxB!p)zw)I(8*NIQ*RgOG2)Qt&f4Nl}A+bq01NdZW#?!ND(wF%3&F*mMp%od~+fyXa z7YztQQWY4`rTPH6_oBI`@K@t5w>L*P>e1$<3U97rToLT^m!CNr^+tQZ{462)dvS^&rXtY-j4G7m8mz@_+eJyHoy*a0x;oToGCnYeuolV+5d z)?YIK!JZcDc6+D23xI7YVO12_N*(KnXYPLx3TzmVDIVECk3FxB`NX&3%x`%WBej5nXk=dAG2~rb}zCE}OBPHP`~#Dl(S;LOM7j%8XIb*-+&F4f%f| z)$QkU_*H_M6Ie)ZOKqD~ znzvVD-V_p-*ymd)bm1%j=p*X0!25F#TNC_63^@^m&|emyE*AXrO~bDO#Q5e8Z<8=C z>?>@o8&gPH2Q7Js_*up(fDLMPeOKd!)a68Qvhz|;KP8}8g{3=aJr1mY>J!+=|bvnjE(&WqT*MWbM8>ET7`&Z z1W-Amvjoj?{fi&-{hv3`YXE@gdVoQBJfKij`HWP9D@wRts9Z2DR;efhqbV7w1iuC) zID*?>m|QkxvrW&2A1XsJ$0?x{$p|c%q;F<)wg$*8|4xUQvEuDimjD>P@J0=ONPJJR zFXLOeqmsaK>=`zw+ds`HbfnrzH9Gr_6DwSo0`Li_`M!#bVF+l;#8_ZG-{HKnpu(?3 zlZy2@mo_dGar|mvPkK^lKB=T-tRTp;%USiC-K3wA3zNuLI?OhCEr1IEzX(=j^43Kv zJaiLzHNIEds+SX?JPdSlgo^7YmdB5*^K7^~xp-pf%N)j+N#GZ;2k83;H@s;mr(oQe zWexyY=s23_YFrv?ONZYjru|eSa0XWal_kCiCEg9TsydY*#=F}rghA$gK>^t|UCCL5 zl}Vppa)!MywIKdV`2L;&kk%3R2q;F0ibhW`ql$Gc{ddSa<@3pH5=04X-$~7)XI-24 zqrIa)vZkS}lf&)h?Ymhd${^Fc*b7Y_q#Uschi(zG$ISt2N}lA>bGjI&WaJ>D2>+EY zc==fbvK_G>pRQPj=s3rVgoB`p&dY3aI0H8QELuGSK;z~iYe9s!+BjOk>Hzh>ync(n zJXosvok~$BVM!}c1&{vH5E(cHI}$pH3nvS60OC#lps*5bcHLmUzROljP-oEJ>2JK7 zH&uZE03ZNKL_t*Ej7*OzPzv7^O90!8Og#g*+D*3RpLUmtbcHN9QSLI%U71fC+Y5E8 zE+BH&)Z2?BWACO=M0_+?6nDF!VJ0nl%b}D0D;tLGv`s_72tMd_Ht`dstrR{aE8UH; zH{!zp=2)*27B^h_N!DSc=Qh}ClDlX}-(7>&6fK5`cZT*WSKD`G&?~^T!!AVPs6qGU zM*HK&g7YF+AM`Dt10h%hZ1~>}Fq&)*Sul|E?B@(W05*RT=sf_c_ZuG|s zvNuqh{M#!d7q)vN*WIN9zSDPn)Q^SdVhZxlSgBQ><=|S{#ebEeydWij_J7Wt$OPhL z;2;z`q)(%KpRexjR^CBv%2`hO!rbyNZYb7X$Ta1XQb^p+*&7e~4)pctkAD}SEYGaN zveuEF1;tVrldLG=5$Mct@>Cvjn1e>RLhHUQNddLLL6V;ias&J5wdo}A~z-vS%DO((DB0u@7L`53m2 z#=O^%ivue_7IY*xm;^r@qc2*}=Y;)4w*03&lJyq41akazlu+KNft$=|7E_M!3gzL? 
zUIVooFRJ_>5X3Whk^O}Mq!+=MCeq9cE4Wa-wdjIo@{UaWH^}{>xDUhaK8|vzuC@Cg zxm@<^mU)Ck3IVgoa22oIwcs!HS6`7U@I$fJ(F}Z!XnxUK$>&0Lwp-FSh_7a}JOXgM z;~xMspRaTeGH)+ZMOTc}YLPOlpJp4RSzO-!-XXuC7GJ?z4*TAVL-s-xUPoUZFQu)x zcx5jdj(DN@n}yZgeTIL7?}3jObBE8t@QT&Za0!8l^XpV9>qa-bUm$O`DX%ny*m$at zc$GwJ0Kr7Q7GXj35A4|5l_*4Ci2NozU3c2BWb6dX-h*lIYsQ65sx-XVH&`<}FO#9H z`~L>{)d5fgRr?^Y!dKBa7kb$+Qh5*xtgr!U$X?H+uW%N0_{2Y(0ixLnL$?O|8qjgG zKYcdKjwe&{nr!hSUu;endphAy_e6v#20?@lE4DMTNnj4ryWC%G6Z47hc8Pi8#Pz42 zp9OpB_ap#RK&!vgAuzix!YyXBD8{w!IT|%PkZ28B8v$&=(gG$HMD~ZM#1;d^ZUiny zpA01OSjcLujU&>wR%SSF5Ps7QwJ0cZVcBoq0f1T!+Fl68SQrB$NdeSxVLBuKoCF(- z%6HYhn#eCFsky)XYbrcMK|xG;lX*1m=e|z6 z5o>^C{ptYzdu9~2+|5)$p*n`dw+%o+1g|5G(K|3KB|^zipfrGdt(LVyX4w8_}cl*#`!%GoWvxuTmshr@lF!~g? z7Sy7;VHv7GyZOvh; z;?@*VS-|!u7CwmDrpg!!84%#tVvx3xe@3w2qJD+?TkaK8tYRaDXN5yaQhlN#GkC#X zS_mH&EOcmwFFlL7yW+z}J}&|h!`ZE?lH>NbpOl|Ti+t7mX5zq#D9*mgBPjyq9(t*Y z>m(~*aHI9S>mj4Q%WTJBZ$I2YIMB<`E^DEc^bHU=vPc$3>pU)77 z*^lLBdkE%84!HQ&t9|5w3jA>%@$5qnc7U&l_<6OWd+I%kpyVQ09ts`eyoC7yVOa=N z5d0$lo$^iosbibAu*DE$s^U#@7L4dQ*4M0z(W18M3>;Gc@xk;qV8dt%l$cd#uWh1NCF|LLUfqTr^DE@n@RJ(Egtjr@yh629vBaW>j}O#)W18@$xr=_Xku7rx zlI8GUrKp`$`8vyRRhLSwJvA5Gfd{Dnl^v z(E$md6o4v*q@N-X0fsNJ0I)4P#1~Zrq{9P$jlVv(F8(IeVk7I#L;}Ebi0ED(w3`?_ zI<{{TTWs&buHKhjIl=c5i@6VP#V!hFoj2Tz1rJSq4@{vqx!82Nw*UzG?ZX6W0`WkG zsQF6?7K!Ya1pv>q0)F^U6%2t8{RO&YsJL(57mus3+I_gOmEreVUZQ{Uz{WL!E{v%Q zz%nc3Nr1r%0X|+(Tc2b&4t-AZYChu1$=67GWfMOh+2|krNt&9S*(Rb&HND zsUGj6nL_cuoQ6*~OboM~Utc1@%g?UxadOzTuk5oiu%11SJu974#UHs47YZB=A_GId zrMUw3_tx`vajUiZr2#BG@be=)Mbt^8H2J5*5h28xdYuvfio)H8bzYkAcx_R0D%00z zGcIr^5yW4aHR9pL7ZLh`Q6U;1%lA@O*dmkw%Ct>l|5}12 z__)rAuQgjPKQ#AcZ*;rxW9`UIP7({(E@PI}CuIcVG+uV>ffH13dQMMF!V%MZ~RC4gyr@`T*%Ia7dgEQ9xCe>X1Wh`G_N|;(>we zc+b)U@ij%pKLOS->#qU}qOCW501>FD6<&5KK*pB}HNL3O;XiQNOk=69s17SIZIz$c z=6(xM)Yb)DtQDfLQ-*;1X{zBqs~icr2d+6UtMQl;ymw`OlAHwOSnbIJ_Hvjo+^91%B;(F;JIc z#(Lv%tb^14iTKwEE@?X?RXy|b5ImMch`LieX5)d3t%RYVZZU844_q4k;gvR;voR2C zVS(j{g0*dHY~2cV__4#YP5kWBw`UZ}q#mKCX7 
zz#qQH@Rfj+bU}x)-H~KGvkQ9}&uv^l$YwXge+T8*sTN`~L-;k`310hL4Y`E0kML7AxN4zD=5;=rWMe7qsG zs057dz&^fEE7k#-WbJ6tA0N;qr+e(m*p?xCDx`V2F#;45%O`<%3Oh(*+%3eFOYARv zx*zd(VcEl!2i;*s(t#{7T{Bf_PZ3 zdub~W3*{t8x~=)%TjDs~v|ZIP-W>hWdaW%_GTpHSH#7b1e^p&APmJ_B43liX_$;;c z^!z66LuIs}fAkyigqXwdUTqG^vdrO`gorO8i+x&xq$A|_LVv!bNbs8l6;}|i3U2r( zRICc*29e%F^x~(}KL}N8H!WGf9InVS%!r+M7ByGygw{lX6IP*k z=x}-m6S@fgF8}V*n*AZuwQS9Es{92WT9S-NfaQz*b{%0O8c;WJIk|9iLm!I$S4<7A%Q36T6 zbFXNEDe@2mmp3ybl#f#Fths8!49`@8Xbl_bj^POky+rJ(9Z6{{}ztBH19AZ_-A839w}^BKR{P z13zY=f*lUOh9fr}-eYVcE79K#*@6Gx=|A>z0KR5r8SFP05B(pH-oyoI3_P-lkAWW% z(+;(kPJf?x4T}kz0%86A8~RuI2V`7u@+4mNWx;Z51BZW+B5&nw^bh!r{=@yTQQJg) zW%*H}{~8#;8Ykrq_z$qdYm({Es5%7v$J%5;`VowQ$1fPNBAa2q-12$KtNzVq|ltE;#dXl+WwT76SEepoOB3NZ(sEe_~ z;R!;NK3-!_hM)YHn&%pOU6940gAJ&O%fuM_JRlT-Of1I&12W{te4De%64ToGweog}sbv_EwKj07g;|u*q<`>~Ao}UgV%sGV$ zkR|;Q$iST2H!OnxNANMpnOn3#`nyD?00YmS20LX3y~#f)4PyBBwI=`WG7<8xhjhn) zZo<0!>mU)z@*@M5NeoZsB-wAhYz!Q)z)VjnmTb1bP*dJa97~93J9?8hqE;u}`LmY?Y2lNM!ogyjVQq{&u)Qis&cjF&4f_ z#1k$aW@R2Zp@wGZ+NNjhQj!w=&m^kD|DFCV_B@*GZsP<%$nb}3uMsBx{Ig0|Qg)Xg zgi*7n2>v(uuW`P3#=sXF9_sY}A^6$0#Fd%hXVozH7jb9j5RQn+56i!(nqxcRCfT1x ze-GyF0TGz~A=079I|HWZIpT~kwjnj z_{s*?s*Qx3xZJ4hHNI+}OqkHQzh9(#2f)3VlI}n$@t_Jz`Rh2?jz+=$fIpNlnb#0; zwpnEQg^2J7FfnfMV~R$^=T?Bps$wr`0)s9yv4MB^F`xv0__l!mviDLI9=o65kKVQdx5%gn)u6CCu+yl0IsEGMUlVeZA4r8v zCDc0+e4krT4yenIvOKsZ7mFD#>c>SW3{#X@w&X;J`hiZ!?@fHY$^R?;dlHu6mmB=Z z@~qXWwKxy~yLih1;Fsb%|)UTm)BEp=}`Mxw3!G zbKUZ|kmLRDD*YOs+Q*{sf#HvOXlO{3caN=fxmo1HBdm_DG{dhBzYfR@D=Zigz{30E zwBC)WjmyElL2I(2uFr_<#LcUD*}=;1&o1w6jHDq~`0cv45g88FtKTgM_{4SCV#=sO zpW)fz;Z)9xh2W30jQ{C?&LfsxeuDlAKzxFv7*Q>-1HOosGeTy)ko+u{e?#pdAeEFz zGW<#euk6V12mRII_ZK_v@CW^o%jil&KLju<=`mQOCdZ((5U|L9z#n`Gt0XQzau4V# z{GpYn_{oDhM^3(Yqd(-djFXhvyu*(fnC?}bobi9HMw~_1Ao&lv88Ck$o|DZEu*1%` z7TF(^`qDf6xWVu6eqn&(6UrI(Wo)w^lY5*h#w6r$P84&qwP+?EctD)_yl}{k(P<+U z;Qeo}&Kn0pG8|mgI>C;Mi$)13+gt;6+~C)*@OL3Q5zSw~9~s!0O^q6e9Mps`jk?~$ zTBJ;!Os~mC$%9nk=BH!i_3rX z&(tpTM~1)9UmN@(PR2MXD7?$R6iO0c$J&r5P^z>DAue>k8U?J$5(W9n>IfW4V+ 
zw6`SXH}C;|0Utm4MJd>a{>^+Yp0W+U68}C}^I)i$Kad4c`SbsVZZIJ_{MQo@O+N(i zbSyr=ZwEVyDvZvgTR{Pa|KoW5vZAVuJO4ly1oG$qG2n$@lUsP$>CgP)BsX&2e;o2( z4f5&N2elu5CD8d0{LBT+2eKfK{P~}tL}6xlZl<9k#q#qZvIpg7rvERZZP5Qi9R_xj z;Ey4U3l9G>`b(yN0rf-F<_1FW|H@Ql6au3TSYb!uV@MH;v~eNeAM1ZerfP@3kXl-s zXvWg=i)4wlwPV4mI_NIjogCWZgJtkf{XHA}1+YnS&MoCD{2}naZ}9(WxXetVZZ69) zupsM$Pq8^yg_W1KqFdGn<8S(p-wioJ!)pq;o7iVT+!_S`JL`k{u1(L;pld%i1r?-@RVlaOk zuuoP|B}6)ERxH9?80x!849%2W&I`$Su?qP+NSoksBD9GMpDRJgU8RyBQIA!N6>Mzp zwT_n0(@*qSozOias-c&tMy$y*4K-1(WWIOE(SN<{?daTm_i9N_Sbk z3$3~+GHM%2gZ~F;!pwb^pI5SUYshq74(+54*@1N`b*4zaI5xAHU;2Ve$16q@5-O&W zlJM|TLHyofmz?5eE+|kB+_Jb;xzUy%SsDHihDE~D_ht>1xynh$0xK) z4{Ed+>-arD=QhDtk|Oi_sugiD0=Jbd&K3AyB$vEV74uiFBW^Dzw!d})okqNx9@YfC zaYu+L{H?&N&KIY?vcnD?3jj|Hwc&?(GNty+yDw@P{!%lQ*s%qi%#j%48iYnC$hSrl zCr{1JRvb*Im9ZAaJ>TVnja0N>?QRUg-|{L^iwT%egSdMTam{CH&zpoW*|o_hvmo4O znEnoS9GV|5Su79H{aGS<>l4I|a`)PvT}9Yfu4ia&9Or}Z;$lY0SBiN_4q*MNCWZ$K z55w2dfMX*vNa#ZW?6~fT7hZ??8Yf9u&F=5dsjm&$pVMKfM@0xJgvDyZ75R7^Bzyev zi2iA66Aq)847O9uzm3ui2PTkd5nM5vOj3p|d@zW@;*d0iET0ZgG{%g z0!P>eAQIax70C>M-$WGoBGV1;;UJp`VbTieFsDLhEd~D z`ix2pEsf?YD_Bp(K(eI-!;@8Ok(h?PknG+#`sUF$oWnooEx;K1RB0(dmUy$9?0~Jf z`F}3M!IrqM|PBH+Us!TktZ_|t-~JQu~`gJS%q|TgsU5w_)5ZdrPw~U z{A?mJhe(aBOw@TURK zE@WjU_V5l4$tC@YBR|6(Dp8W%ISU&{NXsRm=qc=E#q^Y? zB*PE7h31<%QaA>B2<&s-0*n<3%(%0FvPng=Cfc-Xks%Ayzo?&2euTs4GOm#Pgi`Gm z(;b*rFcu9egcscF#8GBeKuB@vIj$EkUb5AmWOPe?jx9g7B+gmyXt4DO9M*!2001BW zNkl@v>z}MbUj8C6y*bFdpBfFiCYl2;y%cevk2ko}s^! 
zhDIhJKmHKDC0+sX2<>s(5toq-`&JUTA}4G%l}~fegd{@0qSQ$Q6b_tAEN$Rf&0h^vfz$jsjg>|4RW{8C+`eEq`-@tLk zB)p;%B~q+L-k?996q9ja4}J__oM2kBgfPvM1kyQXRX`30GGgZ*TU;%b1`6$9YHI@ztt-B(9&Y}V zhklyj4ipxlR>d4*`7im+h&OrY|Oe zxCrh(>XVf)77;yUZa&%QBU{o?n@?j>VLvcno9s;WRUE+oI>0cSBQ)aVXf%o!QwYij zO_al!MzuFqwi{KBl$dgwfx4o$SbRmp^B(Eny@eo5}M-|nry$cv| zQvJgI$<{$$P8bHRN-=Y6E6$v_p+R6q{tWn()tKHYtpD#mm~RIk|juUv3= zXew2kye4>boc!vGD)(tq|mo>j4GHYBIFFgn+taFPXtC9zmdpP%Mh0>)suQ2Tv4 z2j+tb48eGU{z@N;Pjxmzq3AQC#xxX%?I&08v=N_EA1>3!tsj3O#XB4yS;qknnjvXW z8MHr?-j#zbU^hUFloDlFwPHlBPN3ao7A%Z0mSDFArUNpX#FrNW0HbQ2?O(Te6h` z9E^tM1`OT5E`qK7!QG~#ma#o`-Jp&5nlAwZoUF`16SSQz81)+8(vJ?S%MoTST5z9V2{ zHx^0EB7lXEVkQcaqT?uV&GQmgQYqaET=*E#w6>+$iy$eH*CLj&$T8!j2~5AzxkDwO zbz0>1WuWEsXX<69B^?d~Ic53rDX~)4noe$$rr-F6%P@nlCc?*5IT6BR^_%$uS)pAdoByIfkOu zlLG%h%%1&4F;V!maOUjW!y+F(je+^ct0;i%Hz7}VKHf%H4oE?futyw0a6KwSDx^Fr zU4xc71pF-Dqp2>4CIB|%JBVKR0~H#WgBb^|pb+dpmNTJ*J*4MXmZZqKn7JjlDDhN1 zBv8O!2AY?RvE=lF#fe2Iu1um;hzHxFG7S*DDIpRfu0*J0dN%w}qyjDj&05MH{i;-; z5&PXm@K3_1_YAr&B!?k>`*8Z>2NtsFtrJ$sN`%Z5ZpLw5F{z8V7B;j{tnh-3egeSY z*Q2dpAP>G>jVUV#htFJj4#UE*sYgp3260eT;LlR+dvAKyqVR9|HrSp}F$p=0Oc|PlDpYyRxg7Rq%CiKeWR+3j1`mPWBR9CJ3 z&&{LCE&;_(8KS?Sy1%iyKl}-P;I6t|hB#hex#>d5F>+v}MYx?r);JZi&H8^Vf)o7> z0aSerUlun->NF^f23TAv1vg%p!IOc-=`tf8Dxh<-gV+Q||9xw-eS|mXJd80?Ci-SOCDIKN+4U zD>>6gT!-0Vi}B#&8B8(2Yy%`wril6*@MtYL4lBk;b9xuBjC?-wh%gik{z9F~s&sU+ zzs2uih<-><@zQrZGTZpYcGcnjw)_L+Ku6ms>xPRojvy13ZxwA-Z!=t*;%*pl*n0pc zXbdiy=x7~)4<+H~PI9^Pd2%Wc*9Tnz*`{J@3$_r&CrT*5$S=G)BK`?zD4>Ez)9SQh zm9*i=W+azGV zPHqZA`EUHI1!!A|J9QUuOY~d1BdEeyjMhr)6PpF$0|wz6;6-R3s#`#ac#s8241z5` zF5=OVWP)k}yGRbVEtaCkK-S45w^!h2vV)!)4KD!avmhnzU`CL^&?fV%nJ*rqq%7S6 zMBkFTD*B78{tQWSlDP4&9qNKHTEN>+VvwTROzq$-+s$ArbGA7!d-5%a&r;M-xGG9F zL;)mY$I8yd05})KNy`RmxgnY)cmO{5uN-)4!&RdT1(9Q9S~G9hVn%=!Xs{#V@GB}O z>`NjB`2jvq5nv9?AFn}=Z9tLZlSNo6;Pb0&K!r%w%}{`Z; z;m3ZZIqJ%6eUz?vG+pGF+4Uw>KOXEBOH?9C@zI#GjsM|;u}g6FDnK+X>B~wTF@z6M zQuAUeHc#<4=&4@RV(!5E3|qhGL1tW_IY&&Qt_vwaCpUYf8BSy=;x@>@7yDMEtAH8+ 
z-|Ugq59JubcoKkBw2HNO!G?ul&_!hGb*ls-a-nk>Wn>SH;{fDL!V~3VT~xo1i!t{^P?sgzcS%>!GAB1&fYs(cj{(O8gzYjV7NQE!pwG@0+-n zs3spAN?c>Q;z3sKN#9iz6nFD*a)uatFXfL3(9fO&h+YCRxn0lEs5IZft2x6tX_gpx zSQ^<3SEYLv7ANiiVgf*RMPuZqor2VhQVPQCiY#vBr76sXZckU9nG7OCE_{h51-^_` zD&!%~vdxg`LxEp&^Fk$11J>jK;Gw9qND*2b$v_<&H*6AH6a4W!fE86xl%v~0%~JH_ z0Fw#cp9-DqL3BUZM(-WDZ6#$AFXvj+54CopncYe;HF9vK4H!OOnH0s7mCPiDb{nKS z09y0_UNCR{bYS>O0eg|atCNwzlLjsKz~&BQD_|%TP^cHxG(Gml)ABRe!ERVG09{~8 z7+U_7X6wj>e$G2*e4oOti$m?)Pv| zw>bdqP18*VyTmJ3yb6$AY?XRLdlpU{4zlg|oL2?d)0O&;oq9=K6$)KBehTaxu7k-hw&vIPbCh?84Mrc9{CC{P*DPy z7VJtZ6RW3I#WR*LGg|UwY0=v_Ilzv{U`EoI`1|4&f(6K_LWoD*0qaZn zoQOR=0GbKHgK)72jDjAp%drTO5=n%I=CyN5;o5SIh(c~ebsVdn!*)evoVih;khcYA zV;7c$VJ_iI(OLFz0e=V&gSvQ4V`D8!iT)V1F#X864ww$+w?4j({tXUfnik+}xZB)7$X0-%e)2TlhBTG(JkPQHt z(Xc6uNMQ<^Mnql}SeIa;tiM{E+1MJ&@f-`mvMB9h!HxiXi2IyKX|p~r-5d!Ci^&$5 zwIDgRn1hKfSP(fdkryB`k<0a$2&{(oY`P=d1P#klJX{fuAQj;I&##9PkkWM14vyYR zfRA?N8uUp1%bimO5Zx5VSHersE%o?L&xS%p?$sr`6t;@E2cAL`eQb{uun2zdpfK&j ziu8T~Zc4+@mJ?5C79e^aU0IC_x{Q}(4Jm^}QNF%jIDoPIT`EoL8VO!u2Pn7>GZfZ8 zFQZ-rPpl&@z=J?y`4ggFiA@ftOat~DOpbs!K9~OF?Yt>A%l#s%godp45bNq{r5%1D z-t|_`B7$OfSn{J3c*W)Iii5;Lu9+{4y3h&o_0nO|!YLR}s-XuK97bIavQV~t5c91V zpleKRKg&CwSM1_gB#Os+vX#HmRvK$=tx6KcWbc$dZQO539+MGap!%~u2@E1$aR7xqg%mTOV}F^s1P47_2pX}erMsKXx_v*qFokUHuQn(QYlKc* zR4a;^OF$Mu{~W#l?!&r_A^i;eo}7}GpuWJ?BR+1-b!ucA>dQb!$~~hU6XHF5z95o= z9tMdmV9{|NoTmrMEhW_kuv2+9?nQHzy5Kf1NEPd>`5yaXTT z9lm!TR;a4^*1A2lZrWCQV?^LAV8#Dj_8&pkSRSxb%Y}j!G7|9Rq!*Ii@)J4p0zDqt zfUtQI%K&U8<&zLCNb}Ftt~0h8eqgtq4fP;Q7@e-nXn=%y@??Jwd8mZt;{{L3?#xW; z{xhH0huNjtM_eV7pjKzu9rk8 z8b8Ij--Iwg*c`D`KDxNt7KR`1tjWiluPV|~qFj9+F#e80MP^CB%sGW-y#5aOQhu=G z$w?&5nvl-85n&`)D*{HkRC^!LQzIDHIy<0xLjS{TcGTayqGbna5M4H$H3`?^0<7l>tU$au;WR zR%|z5s!a!YKa{={Xh8@rw;r*<67OB(CSbreQ=2#j^oxDL4Z(tfY~aOtv9h3bkGtsB zqSWCh@8A^6$%N#0BIdE5G^UUh3<;MEpv$SYZi?wqep*D&I~;sc_^27x(q7x+Df}6> zp{79iEr143|M|rXDK%7p(ih(AYP~#t)v2(sVL%-j-_MRQWYdrKlLxy(KfIZckAI&W8GsFMc9AV_u+%ijr+AX-f3N;}l;40BZ0ngff8d#^`D} 
z8X|(Tj~1v2w<=3rkzb#LOP%;@!v$*r{$TuqeN@gn){f-6J-u2KLdqrv{_+S(D=!T9 z3PQDI1CSiDG33?OHZ1sD>t$pI);oAR#I0Sa`GDj~(eAy0_kVB{-Rw%*4=xynz$D|7 zS6kq$0@8QkC^oxf>BXLgKq?d(foH6gcyfZaXmbVe#&$%dXc1Qq62s;I<^h0LlVo5L zs0H}kSSbjVT)^f2;<<-H8*UTz=tcg6YS8Li(rc2hs{`w}yj_8R*)6LY8&D3MxnZGM z{Jh6L!+e;pLJ4Ahv5((z>4#oXv(zdivma>(OX4gQtJD00pneJ`tl*azBH>#vXE z)wp9cjP`g2|H?WjH(bEXYdz8-*bac7TiJ{*y4aZ#;U~)=>+jfFZ=d)c34Zp$#jsrS zfO3#GjDHM2Q<#ery*jsYKcJ)^%mOQ62^lG-#dsn9|FvV}^YL;^&fW2;-(g`hq)DuI z8@!)q{58)jivKLn#W-I=ljuRQ6e6;A1X{Alva6Qf#{1WE0DEbK&L%5*%Z|$?YvBVP z-;6T~R0^o=u}$ApsW5b~;p4q!1K$2Pl&!{uWzP=zN7g8z%7vov(*tn%*FiDSSVJ40e$g z4jnjcXKfc3`nc|s*yBRO?z##sx_QzAjq2eYf8>W721uZvgTVW^0iD@yT7zHkDq`$r zibmA}|K~>e&IneG#+@&2tbkAmRwlH!JsLcdWnUzyE)4SG#7Z$}20_@4o;0J=5m=P+}z&SoX~9&C|065up<%9zrr`DI}bdP*&|s#?g>b^`aj!oH^hdyPQD^yJxE%AdJE z759)Ke;`HT$YM;(Qi6+v%uOtnk7&To#{m>WA2hQkc;Of`fo_1i2P(FGM=^yItc9Y6 z0UA>6u5rw_5p0(GoZ~}>w(IDg&l!&!Nw)U z&$GzR%u2o%X0}ho5InnpbO{I&$^Ahjmj7VvnXlZK zR4wlqI*5S=~Te`I{FzNt`2OU4bFBmv7P~?nXZIpJ!=<*C8%7U1%J24UkbTM11 z($}UA2j{u{BKpO_jrO8Y{39T4b$a(luTyzAX!~UFHYCyr*3x*rowG z(shX$SIMvZ$@{qkL=S@T0%X(!wGuqHOh{DDoKLMy2P}3e9mZe4If+I~H81?+z#@QP zNX5574jHymbl$@~9GI_$EX!FfIqdq+jQ{%m2EOi3LedgetD>I8^eT_nJFIkA`AZ}& z4AhhKUW3<`0MM%-%B&Y%W)HX};p)U6)~#%1(A8SZ_*Er(Vo^0w{4rS1o*3^4abVtJ z;|M@&vCS)|_=c*qeUTvB6^vjrcaRrlexAy*)hew#0q1!vw+tZ%!kdtx9T<5w*%JPOxu8{ed{|N$ zDFC?fYoxG>DOxfcy5dz9CtX=A`;l3yUk+rVkcW)Sp8N8mv0zCnPLBkRIeobzXmw!9 z|6+RDDW>#v-;yCSszay2RY}SoAl_vpFVk?bsvy6mfLDDBD+gK9m#&0ef=fwy6+~Q6 zX6L~&^~8G+(LwU`ambAF1K2e#kt6TVbXbb_N>2Jo`tLe-n*%B8>ZwA4Tx7kcm)>vaW0xDJ*^Dr30wwr zE@Z{Z6+{>z3;nI=hyLI7##G>td7eC8@;Moyz(5txOB951hC{nZb^yBVu3j@k>uv$;I%-B+Mfjn!nn?E*)PnN&2 zD5pOs1L`*nU9>GyUhHO@veD#>-Om>$mPA0>;!d75P9U|m1pu;P1 z-p%NXG11PSbch2kY$&g;He$pS#@ z1a}OV7H(8gq&G7-mHyh1THzNtQK&XG;-xbPbn_m-nHM)%2)^=o7~Og+O|s_5;Q$8Q z@&IHte+&?)JgM$=snhGAxw$j1GjJc9Td9!ITA0xyKpMX$le zF3$x0(9{)9l)}K@&cM`a-7OS96jmref^f2UzXadG7`aK43hjkMO0M*EPafkA@K+{@ zsp1E{UygIO+`zM*>)o;*NC8?u`X~`^QVCw`i(H~JeKoVwsIr|cKCF5~XPP$nOILK2 
z8$1~xEJUY3+G{eWbCGud2|sh_`YrRl4!?o%Bl9rwEI%&b%h{5OMKp}l3!#>4g>LZZ zwd}b$$NcvxAx;ZHeDQ|!8{~9omknLKzB}++Cs7RhDQO=fX+>p_T(O*kF7b88nJANC zFJ`Uc%+#e6-vjW>bIZtMbxT*(iy>-diO|?|D|<(U_aVi|EVV2`2UY!Dqpx^cN2{LT zWy((iw~~RW9$^aEGnWBh*_BzS_ZVBRrCu}Ay##dl;{ZSracM$Vq6=UXkSp+bSfrTS z2&mPmdkKggSe*OH4y1K5tm-^od;C*-;k5z2?#&d^uNS|v0kVuLGRbiYEd_HaDF*U+ zQ_$nUxT}+}fG+jiaOQ26@@EC1tMGdP>q!u(3bUAvH5(DLYcGamdsyP%FN5XyN*$R(W49sW?@;5p_Y+aON4&=dZE*RF>m<98`h>bP<--T>w! zbSu(ZN7JZEnE{kvZfGHhO9TSADup*gS3J??R`A46n+S?LC=2u~atybkQ54mlZ5Vy* zv}vP4(8*Q{vK+E*+IB0!hL#2FH_fDKLo8(ATy1A+CGD@+^505d>6Qyb8b$_+g~oE~ zET$BC5hHadPUTcM(G2?2SSFH5OuY%~0u4JqF{TC#_h_tDg8=W+@O?CB2BQ|A<@>!o z+VgzEIw)`~)s;!h#M3Vmc%KcJAa4iJ8nH`o|t;m&3|TstBs zK$FPNzN$?sJO$WFTYnVmLP%KD+(_k;qTvtzA^;nZMcBUNDwQRHxbl_n?eHT->;PC{ zd0&>2ws+FBg}+!*_jH<|AOifwS^#>F}5L=Nj#{8e-4weoQ1*fDx59<)^tV7}iV zp7AJq1`0Np$2m$JUtEPrb+HlN^PP<$a-NAI{IiL&fe%?$if`tSP2-4yUmXA)`rMdX zdSk=8`m6Yr^VcDlt7r+NhYv0Otk^z%FlJXrhhKN)Q1l&o000&CNklV+H{Nr9q>3Jvz_zqm3kKJrfLVil25v6{R#+X~ONIQI?bo4{VoOLUhQS3|c&&9l|G0N1Hmp-mkR}bBh@P`AA zCd!Sw7zKU6$A#VwYD0ws_B7UdEJ&eU@C*NW6^T14zHNvxid6R97n~5HaIZylXZpi@ zitt-z8k6w9#op)wo^iC!xEG=H6;(WA^5o+bySKaAz_Se$`AWXS5St@}1t}G~xo72s z%RIB(hJw;&mNOXdMx+goHfcNjk+h9BcKE5&&fxz?LS6_dzQM_Yy>S!rr$vk*Yo`LSi%>!=5J^%S0aA$PR{Q8z>ht34DuH zEfH2t&&yXjk~11|K-kkKz6a1+{5Zvp7H{z$e>p3OW50cRDzMlvo)Y>v4J3=bUigz_ za`+o9eLWlJO+xj}^18a~mAhY=A-d}Iim`{yod#%rmLz)Nw}_PctI^G|ASzOM0;MG< z3(F=b%!PjYu-1VU4Vj|vnG@X^uBs-?bsWaYBV(!&2@h~8NLmISKvvC8M93!6pPtjj}8P#b6eaWeW_rLSZ_%iCHC zrdAbI=+)vZtoZfcjKQC=s(@< zckAFWj3HAKrmOYD1-}E=-QX*f?h@1TjE>I-yUPr~Bw|q6XoH4AY39{Vc*F-=zH~)Y zlAl9#n6E5+ZW&c3YT8IazId{vjUljbY)c2AhLYLKkMe{++uR{o(ULPO?^av^j}mb0 zYH6u=0Ffy&KLmza3hrP}#VoN7WHirfgsjntgtG%|oz5~~jd`6ee4SSKL9loD`;WqV zCQL`tNGl?;ayew;w=xv(iU3^!YH5nAKNrUM{7?D<@%NnM6bzio-M6si{Htatn=lvT zSaa~EV zr$2x8J;V&A$S>o#N$kHTvyX6u=BFvy0z$$FL*OcAAX8Jnk5Y8Gdx+fH1U<0d%Ti zcoHGrpC0`W`fDPfa_)@*a59tu;42@Zb5xqihm0_A9@&6ei(gG4sG-&$aD8>g8>NPn zJo3{Nt5r>htpga{jKpXCGDkF#{FMXP5Q6k&E%KhsDNLr;3_?^r;xoz*FKOwfLSdDad_6 
z!k#GbHvk(0=e_nGGMH;BQI+0dRpM`3inqxc-q*jT+iyjyZg}b{J%|Kb>PKnpJN$bN@KpY~c9*5W%g?N?YIac>HVzfUE+^dsR1Z z;SUE8N?Ulc3hpAck%DCt&_2Df&S)H3o1Y_K?7)0+H+vO_f|_PQa_YO~Kd7Xzs|>(4 zrJbN4qr>(^xwLX-=x$(?Yd?w*o;PJ#M{$ooEDK22_{L28C{Kx+i{P$ESP}L3054_9 zV`U#^!Tdq6nSLITz|RsjC=ZdAjY-`*wyvy~tTF>r*+w26(2(&}EIfba30j@qq%V9e z2rWK;XcQLh)O0db6sJH|U1LJI& zfwlO%OwoZ}AAB ze3SCKfJ!Pf#Hf@V`(bykq$?*YiPzmPSn9xC;L|~z-x&5ppi;5waHMv|0~(bpe@^9T z4P|Hg9|~kWI*}LMT%&Y3;P@?mmo-@@o2o6V`^l<*jYd8t+5nxOmhXB;a0wt*R~AbY z%n!GBWU33o&Q|tuvMqfuGOKR>^Aa4Wah05qDBV4&*J5$GQqK)IMt?BU!;yX#?)V3ph|5U$pEr23}IuS1p#Ja&%B`Y(wKHsFw~V6Bog)&qLm{T zqWeakWx9Shsc5W_e%;TljTyOH9_au#=y4_p?h=WdB5-^Ow^^E6>j3xbIAI6uyUqYu zMOS8`*ia?*Iy?mJ%7$to>3}5iVVIw}LS}nW#mE%7XXoTOO4=LHmYib3tV$0Pm>$K@ zcb-^wLd^f7mTZZbYPGE^{EN($EswR7CKG;!*B&zL*hH^5v9QRTSI3aakFVdH*>#oQ z8^0}BUEDx{S^|sm#LOnW@M zz+zpc$b*Im#gMVK?CriX%7Mk#VX#vntZC_L3q@lAp5XYb4<&I@GE_C zy;uH2_!;p=7?1>zcxdfo>xJ@p4YhqnK~h0kgTg9zVW(e5xXn3dqY3r zFRvP#4a_5-k3p+Wvb3~!CZk$rP0|ytsa!3*UC>p&Bs_(2vU_yKLTwP=@@4bQ^Y?=a zTe*!?jOj{pjHy_pLEV*K?kg&<8q!EyZH9L(tfbw6c{8t!XYDtbZ$d>BM&mCxeL6L} z$FC_52TMNm>Ok(9mMC=LEz=89-soVpul?Ui^dxZWFnKU|<9`t~CQp~N{>XZ|V}1`1 z1Aw3B-L}mc07R+yuqbVe)^~n!kjpqpAbTI>Gq4k&oCu=>VQM;?ilmzX>qp(^infK&1*`L`95_idT!O9Shsw_f&Sx z!$$Xg;5Cn~HR!jD&d8XXwxrD;d4i1(*zkdufFR@k=9h(^ zRmm6mzn$bc0JYa994|6=&;kW=Nr~V#4AsI;=RI~8grJujCgl#~AiCP1CY&6%z{@`E zExUJU7ZoD+3*$)d($UK0kOyL9*IQ^Gw6FJP@D?zf{-5)(=S}uyGo0m7FbOO`TphId z6G~pT;jUmaJZ`qwR@`yuEHeN&PaGp{*Z`StyMUHNgkaI z4MK{GvQKfni5Y@4@)6R8meRS40WDyBamY3lIm12vp3z+BMgNd_1fT^8gH#gO^gA`e zV|U8P{yXVw9KXiIFyMwUfHAwZd*`JAFn0pui6tqItMa{(!B4<$gB?%0lb{%aYge85 zs-L^?jQ%Df2LKccJy^!0XeYRKV)bK9n3T(td<7B-4cni22UpdsCHIl3j( za#t#9uHd!ezE^dRIX&HkS1Q=`7BHM=W9rieeUaRM+%xl%^Um~n+C>I&QRoV_4m*GV zAN2B*&k(Mb-(v4N68L374?;E8t2o+-5SC~Df7AGJG3g&vN4T0Bmm3HDJt_c9=bf21j~UXwdUbcDst}tFwMN4**Xg z-0v0a_#lbTWpP??8~*+D5U%)MKY?G6#;;&t&Rh$OZa03LKryC!dGMbHkKDL)*WF;S z^P&iV-E=$LuLj5C(&n>kw~`uQ!>Q57Xp*Bh8uB)2bd$MJON@U1(1C689B-pec z<(=!=Bp2x_uI28F*V8QxdKsO)RprB`mwfiO(}tzd1#%Z{p7~!;8F8Bj<>i}2{y2RN 
zd=HF)@5@R~18UQTo(IN&+ig1I4OD($82kt)VLo63=X4cg;53Yjs=1*s?kC!L`v80P zn#TjC?vDG)*hK&*ZkWM=(=lw|R|dv2Djxvh9|O<#=nt3y$8~C4<WFGfORr_i8eD3~i?Bj}3#uoVf zi2@(|NVl?Y{4au(y=|HtdwxOdFeayd+W}+@*F4OI?RMQ2L%*qmjI)jU+nR00x;xu7 ze|?!*o_1@|wl0~wi^hvE{WQG`BPIhp-P>rvzi|vDW0~zBBl~-)NX~p&mIG!oLk(;Y zsOo)@O~2E+?mjO3d+BT7ybPaBD?h`3n-=G^Y1yZp4fEu7uNs}xfDXI!mks?suG@t6 zxO<SB8Yle+;5G3czK0um1Y*+Gum&8SW}!@8S8i8pQC0CHjQZsyJL^(zeDMZQm(^> zK=5OMpCj%x?XBs-U&4P5_U|}4Xqjve{O7DTrbF_M{~z&t#yXT1&mTw1n6}=bxv&1NaC-uPue_ zz#tPlU$EjYr7yc@03P}o_L>h3$6VVPtBZ- zjGtEX00jS4e%RKZG&G1MM7zTvX2Lh||C|(I;`e_OzfG0Y;RtSrYCqH&zKQC~8L<2~ zyWS(^e-A%0{$H8l$cIVxu4&WL)SVv@Aq90LKR#e<`TtY=eOlG{=)fW~KKAi<(*Fao WV2kfaZkx{l0000 movieOutput! movieOutput!.startRecording() { started in diff --git a/framework/Source/Operations/AmatorkaFilter.swift b/framework/Source/Operations/AmatorkaFilter.swift index cf645454..2a6d507e 100755 --- a/framework/Source/Operations/AmatorkaFilter.swift +++ b/framework/Source/Operations/AmatorkaFilter.swift @@ -11,7 +11,12 @@ public class AmatorkaFilter: LookupFilter { public override init() { super.init() - ({lookupImage = try? PictureInput(imageName:"lookup_amatorka.png")})() + do { + try ({lookupImage = try PictureInput(imageName:"lookup_amatorka.png")})() + } + catch { + print("ERROR: Unable to create PictureInput \(error)") + } ({intensity = 1.0})() } } diff --git a/framework/Source/Operations/MissEtikateFilter.swift b/framework/Source/Operations/MissEtikateFilter.swift index 05a2d260..49f38efd 100755 --- a/framework/Source/Operations/MissEtikateFilter.swift +++ b/framework/Source/Operations/MissEtikateFilter.swift @@ -10,7 +10,12 @@ public class MissEtikateFilter: LookupFilter { public override init() { super.init() - ({lookupImage = try? 
PictureInput(imageName:"lookup_miss_etikate.png")})() + do { + try ({lookupImage = try PictureInput(imageName:"lookup_miss_etikate.png")})() + } + catch { + print("ERROR: Unable to create PictureInput \(error)") + } } } #endif diff --git a/framework/Source/Operations/SoftElegance.swift b/framework/Source/Operations/SoftElegance.swift index fa5d4f1c..8cec3af0 100755 --- a/framework/Source/Operations/SoftElegance.swift +++ b/framework/Source/Operations/SoftElegance.swift @@ -9,8 +9,13 @@ public class SoftElegance: OperationGroup { super.init() self.configureGroup{input, output in - self.lookup1.lookupImage = try? PictureInput(imageName:"lookup_soft_elegance_1.png") - self.lookup2.lookupImage = try? PictureInput(imageName:"lookup_soft_elegance_2.png") + do { + self.lookup1.lookupImage = try PictureInput(imageName:"lookup_soft_elegance_1.png") + self.lookup2.lookupImage = try PictureInput(imageName:"lookup_soft_elegance_2.png") + } + catch { + print("ERROR: Unable to create PictureInput \(error)") + } self.gaussianBlur.blurRadiusInPixels = 10.0 self.alphaBlend.mix = 0.14 diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 87280118..43bb5e23 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -7,7 +7,7 @@ public protocol AudioEncodingTarget { func readyForNextAudioBuffer() -> Bool } -enum MovieOutputError: Error, CustomStringConvertible { +public enum MovieOutputError: Error, CustomStringConvertible { case startWritingError(assetWriterError: Error?) case pixelBufferPoolNilError @@ -51,13 +51,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var renderFramebuffer:Framebuffer! var audioSettings:[String:Any]? = nil + var audioSourceFormatHint:CMFormatDescription? 
let movieProcessingContext:OpenGLContext var synchronizedEncodingDebug = false var totalFramesAppended:Int = 0 - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup let movieProcessingContext = OpenGLContext() @@ -112,7 +113,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.movieProcessingContext = movieProcessingContext } - public func startRecording(_ completionCallback:((_ started: Bool) -> Void)? = nil) { + public func startRecording(_ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. 
@@ -143,13 +144,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.synchronizedEncodingDebugPrint("MovieOutput started writing") - completionCallback?(true) + completionCallback?(true, nil) } catch { - print("MovieOutput unable to start writing: \(error)") - self.assetWriter.cancelWriting() - completionCallback?(false) + completionCallback?(false, error) } } } @@ -297,7 +296,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Audio support public func activateAudioTrack() { - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:self.audioSettings) + assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) + assetWriter.add(assetWriterAudioInput!) assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } From a23ff5fdae70b01791022ddf1560ba81c8408e51 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 1 Apr 2018 23:10:17 -0700 Subject: [PATCH 064/332] README --- README.md | 130 +++++++++++++++++++++++++++--------------------------- 1 file changed, 65 insertions(+), 65 deletions(-) diff --git a/README.md b/README.md index cdfa34f9..378004f9 100755 --- a/README.md +++ b/README.md @@ -216,84 +216,84 @@ To filter an existing movie file and save the result to a new movie file you can ```swift - let bundleURL = Bundle.main.resourceURL! - // The movie you want to reencode - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! +let bundleURL = Bundle.main.resourceURL! +// The movie you want to reencode +let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! - let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! - // The location you want to save the new video - let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! +let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! 
+// The location you want to save the new video +let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] - let asset = AVURLAsset(url:movieURL, options:inputOptions) +let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] +let asset = AVURLAsset(url:movieURL, options:inputOptions) - guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } - let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first +guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } +let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first - // If you would like passthrough audio instead, use nil for both audioDecodingSettings and audioEncodingSettings - let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples +// If you would like passthrough audio instead, use nil for both audioDecodingSettings and audioEncodingSettings +let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples - do { - movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) - } - catch { - print("ERROR: Unable to setup MovieInput with error: \(error)") - return - } +do { + movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) +} +catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return +} - try? 
FileManager().removeItem(at: exportedURL) - - let videoEncodingSettings:[String:Any] = [AVVideoCompressionPropertiesKey: [ - AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, - AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, - AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, - AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, - AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], - AVVideoCodecKey: AVVideoCodecH264] - - var acl = AudioChannelLayout() - memset(&acl, 0, MemoryLayout.size) - acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo - - let audioEncodingSettings:[String:Any] = [ - AVFormatIDKey:kAudioFormatMPEG4AAC, - AVNumberOfChannelsKey:2, - AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, - AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), - AVEncoderBitRateKey:96000 - ] - - do { - movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings) - } - catch { - print("ERROR: Unable to setup MovieOutput with error: \(error)") - return - } +try? 
FileManager().removeItem(at: exportedURL) - filter = MissEtikateFilter() +let videoEncodingSettings:[String:Any] = [AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, + AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], + AVVideoCodecKey: AVVideoCodecH264] - if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } - movieInput.synchronizedMovieOutput = movieOutput - //movieInput.synchronizedEncodingDebug = true - movieInput --> filter --> movieOutput +var acl = AudioChannelLayout() +memset(&acl, 0, MemoryLayout.size) +acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo - movieInput.completion = { - self.movieOutput.finishRecording { - DispatchQueue.main.async { - print("Encoding finished") - } +let audioEncodingSettings:[String:Any] = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 +] + +do { + movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings) +} +catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return +} + +filter = MissEtikateFilter() + +if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } +movieInput.synchronizedMovieOutput = movieOutput +//movieInput.synchronizedEncodingDebug = true +movieInput --> filter --> movieOutput + +movieInput.completion = { + self.movieOutput.finishRecording { + 
DispatchQueue.main.async { + print("Encoding finished") } } +} - movieOutput.startRecording() { started, error in - if(!started) { - print("ERROR: MovieOutput unable to start writing: \(String(describing: error))") - return - } - self.movieInput.start() - print("Encoding started") +movieOutput.startRecording() { started, error in + if(!started) { + print("ERROR: MovieOutput unable to start writing: \(String(describing: error))") + return } + self.movieInput.start() + print("Encoding started") +} ``` The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a lookup filter (Miss Etikate), and directs movie frames to be processed through the lookup filter on their way to the new file. From ae2f5915a08ef65c81d9768226f7465c9e086ee2 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Sun, 1 Apr 2018 23:20:41 -0700 Subject: [PATCH 065/332] README --- README.md | 12 +++++------- .../SimpleMovieEncoding/ViewController.swift | 4 ++-- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 378004f9..3db5c190 100755 --- a/README.md +++ b/README.md @@ -224,13 +224,12 @@ let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMa // The location you want to save the new video let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! 
-let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] -let asset = AVURLAsset(url:movieURL, options:inputOptions) +let asset = AVURLAsset(url:movieURL, options:[AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)]) guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first -// If you would like passthrough audio instead, use nil for both audioDecodingSettings and audioEncodingSettings +// If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples do { @@ -271,11 +270,10 @@ catch { return } -filter = MissEtikateFilter() +filter = SaturationAdjustment() if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } movieInput.synchronizedMovieOutput = movieOutput -//movieInput.synchronizedEncodingDebug = true movieInput --> filter --> movieOutput movieInput.completion = { @@ -288,7 +286,7 @@ movieInput.completion = { movieOutput.startRecording() { started, error in if(!started) { - print("ERROR: MovieOutput unable to start writing: \(String(describing: error))") + print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") return } self.movieInput.start() @@ -296,7 +294,7 @@ movieOutput.startRecording() { started, error in } ``` - The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a lookup filter (Miss Etikate), and directs movie frames to be processed through the lookup filter on their way to the new file. + The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the new file. 
### Writing a custom image processing operation ### diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift index d7136df8..15b19a55 100644 --- a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -37,7 +37,7 @@ class ViewController: UIViewController { guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first - // If you would like passthrough audio instead, use nil for both audioDecodingSettings and audioEncodingSettings + // If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples do { @@ -100,7 +100,7 @@ class ViewController: UIViewController { movieOutput.startRecording() { started, error in if(!started) { - print("ERROR: MovieOutput unable to start writing: \(String(describing: error))") + print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") return } self.movieInput.start() From 9b237f77ce0efde3b677b37ebbfeee43ba92007b Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 2 Apr 2018 00:09:27 -0700 Subject: [PATCH 066/332] README --- README.md | 34 +++++----- .../SimpleMovieEncoding/ViewController.swift | 65 +++++++++++-------- framework/Source/iOS/MovieOutput.swift | 1 + 3 files changed, 56 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 3db5c190..1c99ebf1 100755 --- a/README.md +++ b/README.md @@ -220,20 +220,20 @@ let bundleURL = Bundle.main.resourceURL! // The movie you want to reencode let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! 
-let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! +let documentsDir = FileManager().urls(for:.documentDirectory, in:.userDomainMask).first! // The location you want to save the new video let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! let asset = AVURLAsset(url:movieURL, options:[AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)]) -guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } -let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first +guard let videoTrack = asset.tracks(withMediaType:AVMediaType.video).first else { return } +let audioTrack = asset.tracks(withMediaType:AVMediaType.audio).first // If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil -let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples +let audioDecodingSettings:[String:Any] = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples do { - movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) + movieInput = try MovieInput(asset:asset, videoComposition:nil, playAtActualSpeed:false, loop:false, audioSettings:audioDecodingSettings) } catch { print("ERROR: Unable to setup MovieInput with error: \(error)") @@ -242,18 +242,18 @@ catch { try? 
FileManager().removeItem(at: exportedURL) -let videoEncodingSettings:[String:Any] = [AVVideoCompressionPropertiesKey: [ - AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, - AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, - AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, - AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, - AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], - AVVideoCodecKey: AVVideoCodecH264] +let videoEncodingSettings:[String:Any] = [ + AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey:videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey:videoTrack.estimatedDataRate, + AVVideoProfileLevelKey:AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey:videoTrack.requiresFrameReordering], + AVVideoCodecKey:AVVideoCodecH264] var acl = AudioChannelLayout() memset(&acl, 0, MemoryLayout.size) acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo - let audioEncodingSettings:[String:Any] = [ AVFormatIDKey:kAudioFormatMPEG4AAC, AVNumberOfChannelsKey:2, @@ -263,7 +263,7 @@ let audioEncodingSettings:[String:Any] = [ ] do { - movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings) + movieOutput = try MovieOutput(URL:exportedURL, size:Size(width:Float(videoTrack.naturalSize.width), height:Float(videoTrack.naturalSize.height)), fileType:AVFileType.mp4.rawValue, liveVideo:false, videoSettings:videoEncodingSettings, videoNaturalTimeScale:videoTrack.naturalTimeScale, audioSettings:audioEncodingSettings) } catch { print("ERROR: Unable to setup MovieOutput with error: \(error)") @@ -278,9 +278,7 @@ movieInput --> filter --> movieOutput 
movieInput.completion = { self.movieOutput.finishRecording { - DispatchQueue.main.async { - print("Encoding finished") - } + print("Encoding finished") } } @@ -294,7 +292,7 @@ movieOutput.startRecording() { started, error in } ``` - The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the new file. + The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the new file. In addition it writes the audio in AAC format to the new file. ### Writing a custom image processing operation ### diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift index 15b19a55..3ddf2d5a 100644 --- a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -27,21 +27,45 @@ class ViewController: UIViewController { // The movie you want to reencode let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! - let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! + let documentsDir = FileManager().urls(for:.documentDirectory, in:.userDomainMask).first! // The location you want to save the new video let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! 
let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] let asset = AVURLAsset(url:movieURL, options:inputOptions) - guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } - let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first + guard let videoTrack = asset.tracks(withMediaType:AVMediaType.video).first else { return } + let audioTrack = asset.tracks(withMediaType:AVMediaType.audio).first - // If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil - let audioDecodingSettings:[String:Any] = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples + let audioDecodingSettings:[String:Any]? + let audioEncodingSettings:[String:Any]? + var audioSourceFormatHint:CMFormatDescription? = nil + + let shouldPassthroughAudio = false + if(shouldPassthroughAudio) { + audioDecodingSettings = nil + audioEncodingSettings = nil + // A format hint is required when writing to certain file types with passthrough audio + // A conditional downcast would not work here for some reason + if let description = audioTrack?.formatDescriptions.first { audioSourceFormatHint = (description as! 
CMFormatDescription) } + } + else { + audioDecodingSettings = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples + var acl = AudioChannelLayout() + memset(&acl, 0, MemoryLayout.size) + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo + audioEncodingSettings = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 + ] + audioSourceFormatHint = nil + } do { - movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) + movieInput = try MovieInput(asset:asset, videoComposition:nil, playAtActualSpeed:false, loop:false, audioSettings:audioDecodingSettings) } catch { print("ERROR: Unable to setup MovieInput with error: \(error)") @@ -50,28 +74,17 @@ class ViewController: UIViewController { try? FileManager().removeItem(at: exportedURL) - let videoEncodingSettings:[String:Any] = [AVVideoCompressionPropertiesKey: [ - AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, - AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, - AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, - AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, - AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], - AVVideoCodecKey: AVVideoCodecH264] - - var acl = AudioChannelLayout() - memset(&acl, 0, MemoryLayout.size) - acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo - - let audioEncodingSettings:[String:Any] = [ - AVFormatIDKey:kAudioFormatMPEG4AAC, - AVNumberOfChannelsKey:2, - AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, - AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), - AVEncoderBitRateKey:96000 - ] + let videoEncodingSettings:[String:Any] = [ + AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey:videoTrack.nominalFrameRate, + 
AVVideoAverageBitRateKey:videoTrack.estimatedDataRate, + AVVideoProfileLevelKey:AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey:videoTrack.requiresFrameReordering], + AVVideoCodecKey:AVVideoCodecH264] do { - movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings) + movieOutput = try MovieOutput(URL: exportedURL, size:Size(width:Float(videoTrack.naturalSize.width), height:Float(videoTrack.naturalSize.height)), fileType:AVFileType.mp4.rawValue, liveVideo:false, videoSettings:videoEncodingSettings, videoNaturalTimeScale:videoTrack.naturalTimeScale, audioSettings:audioEncodingSettings, audioSourceFormatHint:audioSourceFormatHint) } catch { print("ERROR: Unable to setup MovieOutput with error: \(error)") diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 43bb5e23..bef4aaa4 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -109,6 +109,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriter.add(assetWriterVideoInput) self.audioSettings = audioSettings + self.audioSourceFormatHint = audioSourceFormatHint self.movieProcessingContext = movieProcessingContext } From 47daaae4fd96f75dc19bae3c4115080a7da465ef Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 2 Apr 2018 02:22:26 -0700 Subject: [PATCH 067/332] Updated comments --- framework/Source/iOS/RenderView.swift | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 4637ac39..21b622c6 100755 --- a/framework/Source/iOS/RenderView.swift +++ 
b/framework/Source/iOS/RenderView.swift @@ -87,12 +87,14 @@ public class RenderView:UIView, ImageConsumer { displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - // Without the flush I occasionally get a warning from UIKit on the camera renderView and - // when the warning comes in the renderView just stays black. This happens rarely but often enough to be a problem. - // I tried a transaction and it doesn't silence it and this is likely why --> http://danielkbx.com/post/108060601989/catransaction-flush - // This is also very important because it guarantees the view is layed out at the correct size before it is drawn to. - // Its possible the size of the view was changed right before this was called which would result in us drawing to the view at the old size + // Without the flush you will occasionally get a warning from UIKit and when that happens the RenderView just stays black. + // "CoreAnimation: [EAGLContext renderbufferStorage:fromDrawable:] was called from a non-main thread in an implicit transaction! + // Note that this may be unsafe without an explicit CATransaction or a call to [CATransaction flush]." + // I tried a transaction and that doesn't work and this is probably why --> http://danielkbx.com/post/108060601989/catransaction-flush + // Using flush is important because it guarantees the view is layed out at the correct size before it is drawn to since this is being done on a background thread. + // Its possible the size of the view was changed right before we got here and would result in us drawing to the view at the old size // and then the view size would change to the new size at the next layout pass and distort our already drawn image. + // Since we do not call this function often we do not need to worry about the performance impact of calling flush. 
CATransaction.flush() sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) @@ -103,10 +105,10 @@ public class RenderView:UIView, ImageConsumer { backingSize = GLSize(width:backingWidth, height:backingHeight) guard (backingWidth > 0 && backingHeight > 0) else { - print("Warning: View had a zero size") + print("WARNING: View had a zero size") if(self.internalLayer.bounds.width > 0 && self.internalLayer.bounds.height > 0) { - print("Warning: View size \(self.internalLayer.bounds) may be too large ") + print("WARNING: View size \(self.internalLayer.bounds) may be too large ") } return false } @@ -115,7 +117,7 @@ public class RenderView:UIView, ImageConsumer { let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - print("Warning: Display framebuffer creation failed with error: \(FramebufferCreationError(errorCode:status))") + print("WARNING: Display framebuffer creation failed with error: \(FramebufferCreationError(errorCode:status))") return false } From 8cba58e3f6b744c7e3f20770e98b902e07eb2af8 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 2 Apr 2018 18:02:47 -0700 Subject: [PATCH 068/332] Comments --- framework/Source/iOS/Camera.swift | 2 -- 1 file changed, 2 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index f5761805..95b9a700 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -177,9 +177,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer deinit { sharedImageProcessingContext.runOperationSynchronously{ self.stopCapture() - //Fix crash when hitting catch block in init block self.videoOutput?.setSampleBufferDelegate(nil, queue:nil) - self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) } } From 2fbb16eba1cf79be05c2779eb7a9fada8703d373 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 2 Apr 2018 18:21:01 -0700 Subject: 
[PATCH 069/332] Update SimpleVideoRecorder example --- .../SimpleVideoRecorder/ViewController.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 471395b4..ad950f26 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -56,9 +56,10 @@ class ViewController: UIViewController { movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), fileType:AVFileTypeMPEG4, liveVideo:true, videoSettings:videoSettings, audioSettings:audioSettings) camera.audioEncodingTarget = movieOutput filter --> movieOutput! - movieOutput!.startRecording() { started in + movieOutput!.startRecording() { started, error in if(!started) { self.isRecording = false + fatalError("ERROR: Could not start writing with error: \(String(describing: error))") } } DispatchQueue.main.async { From b5ae0305d32ff1dc834b1474c2299ff01dcb52b5 Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Mon, 2 Apr 2018 21:40:06 -0700 Subject: [PATCH 070/332] Fix memory leak --- README.md | 2 ++ .../SimpleMovieEncoding/ViewController.swift | 3 +++ 2 files changed, 5 insertions(+) mode change 100644 => 100755 examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift diff --git a/README.md b/README.md index 1c99ebf1..de4f9270 100755 --- a/README.md +++ b/README.md @@ -278,6 +278,8 @@ movieInput --> filter --> movieOutput movieInput.completion = { self.movieOutput.finishRecording { + self.movieInput.audioEncodingTarget = nil + self.movieInput.synchronizedMovieOutput = nil print("Encoding finished") } } diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift old mode 100644 new mode 100755 index 
3ddf2d5a..ce027377 --- a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -100,6 +100,9 @@ class ViewController: UIViewController { movieInput.completion = { self.movieOutput.finishRecording { + self.movieInput.audioEncodingTarget = nil + self.movieInput.synchronizedMovieOutput = nil + DispatchQueue.main.async { print("Encoding finished") } From 82c1f359acaaafce35cc01657136f16ece6734fd Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 3 May 2018 00:42:47 -0700 Subject: [PATCH 071/332] Fix Camera deadlock and uneaven delegate calls on RenderView renderbuffer generation errors --- framework/Source/iOS/Camera.swift | 10 +++++++++- framework/Source/iOS/RenderView.swift | 26 +++++++++++++++----------- 2 files changed, 24 insertions(+), 12 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 95b9a700..ca370b09 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -175,8 +175,16 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } deinit { + let captureSession = self.captureSession + DispatchQueue.global().async { + if (captureSession.isRunning) { + // Don't call this on the sharedImageProcessingContext otherwise you may get a deadlock + // since this waits for the captureOutput() delegate call to finish. 
+ captureSession.stopRunning() + } + } + sharedImageProcessingContext.runOperationSynchronously{ - self.stopCapture() self.videoOutput?.setSampleBufferDelegate(nil, queue:nil) self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) } diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 21b622c6..5606dd92 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -143,10 +143,23 @@ public class RenderView:UIView, ImageConsumer { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + let cleanup: () -> Void = { + if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false) { + DispatchQueue.main.async { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } + } + else { + self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) + framebuffer.unlock() + } + } + let work: () -> Void = { if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { + cleanup() // Bail if we couldn't successfully create the displayFramebuffer - framebuffer.unlock() return } self.activateDisplayFramebuffer() @@ -160,16 +173,7 @@ public class RenderView:UIView, ImageConsumer { sharedImageProcessingContext.presentBufferForDisplay() - if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false) { - DispatchQueue.main.async { - self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - framebuffer.unlock() - } - } - else { - self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - framebuffer.unlock() - } + cleanup() } if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? 
false) { From ce78c0fc936e7b0f8d816e89953e1d443f8a2e7c Mon Sep 17 00:00:00 2001 From: Josh Bernfeld Date: Thu, 10 May 2018 13:58:37 -0700 Subject: [PATCH 072/332] Add mute support to SpeakerOutput --- framework/Source/iOS/SpeakerOutput.swift | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index 03e00624..499d4763 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -16,6 +16,9 @@ public class SpeakerOutput: AudioEncodingTarget { public var changesAudioSession = true public private(set) var isPlaying = false + + public var isMuted = false + var hasBuffer = false var isReadyForMoreMediaData = true { willSet { @@ -301,8 +304,10 @@ func playbackCallback( let requestedBytesSize = inNumberFrames * p.unitSize * numberOfChannels let bytesToRead = min(availableBytes, requestedBytesSize) - // Copy the bytes from the circular buffer into the outSample - memcpy(outSamples, bufferTail, Int(bytesToRead)) + if(!p.isMuted) { + // Copy the bytes from the circular buffer into the outSample + memcpy(outSamples, bufferTail, Int(bytesToRead)) + } // Clear what we just read out of the circular buffer TPCircularBufferConsume(&p.circularBuffer, bytesToRead) From e6851391254fa46a696c1ef03b0634bda8fcf607 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 26 Feb 2018 17:58:19 +0800 Subject: [PATCH 073/332] add podspec --- GPUImage2.podspec | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 GPUImage2.podspec diff --git a/GPUImage2.podspec b/GPUImage2.podspec new file mode 100644 index 00000000..a12aeb90 --- /dev/null +++ b/GPUImage2.podspec @@ -0,0 +1,20 @@ +Pod::Spec.new do |s| + s.name = 'GPUImage2' + s.version = '0.1.0' + s.license = 'BSD' + s.summary = 'An open source iOS framework for GPU-based image and video processing.' 
+ s.homepage = 'https://github.com/BradLarson/GPUImage2' + s.author = { 'Brad Larson' => 'contact@sunsetlakesoftware.com' } + + s.source = { :git => 'https://github.com/RoCry/GPUImage2' } + + s.source_files = 'framework/Source/**/*.{h,m,swift}' + s.resources = 'framework/Source/Operations/Shaders/*.{fsh}' + s.requires_arc = true + s.xcconfig = { 'CLANG_MODULES_AUTOLINK' => 'YES', 'OTHER_SWIFT_FLAGS' => "$(inherited) -DGLES"} + + s.ios.deployment_target = '8.0' + s.ios.exclude_files = 'framework/Source/Mac', 'framework/Source/Linux', 'framework/Source/Operations/Shaders/ConvertedShaders_GL.swift' + s.frameworks = ['OpenGLES', 'CoreMedia', 'QuartzCore', 'AVFoundation'] + +end From 1df72c965074ef55567fb442c621688c38722449 Mon Sep 17 00:00:00 2001 From: RoCry Date: Sun, 6 May 2018 11:30:13 +0800 Subject: [PATCH 074/332] Support swap location and fix orientation --- framework/Source/iOS/Camera.swift | 60 ++++++++++++++++++++++++------- 1 file changed, 48 insertions(+), 12 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index ca370b09..31b537ab 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -12,9 +12,9 @@ public enum PhysicalCameraLocation { // Documentation: "The front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight." 
func imageOrientation() -> ImageOrientation { switch self { - case .backFacing: return .landscapeRight - case .frontFacing: return .landscapeLeft - case .frontFacingMirrored: return .landscapeLeft + case .backFacing: return .portrait + case .frontFacing: return .portrait + case .frontFacingMirrored: return .portrait } } @@ -46,7 +46,34 @@ let initialBenchmarkFramesToIgnore = 5 public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { public var location:PhysicalCameraLocation { didSet { - // TODO: Swap the camera locations, framebuffers as needed + if oldValue == location { return } + + let devicePosition = location.captureDevicePosition() + + guard let device = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).first(where: { + ($0 as? AVCaptureDevice)?.position == devicePosition + }) as? AVCaptureDevice else { + fatalError("ERROR: Can't find video devices for \(devicePosition)") + } + + do { + let newVideoInput = try AVCaptureDeviceInput(device: device) + captureSession.beginConfiguration() + + captureSession.removeInput(videoInput) + if captureSession.canAddInput(newVideoInput) { + captureSession.addInput(newVideoInput) + videoInput = newVideoInput + } else { + captureSession.addInput(videoInput) + } + + Camera.updateOrientation(location: location, videoOutput: videoOutput) + + captureSession.commitConfiguration() + } catch let error { + fatalError("ERROR: Could not init device: \(error)") + } } } public var runBenchmark:Bool = false @@ -69,7 +96,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public weak var delegate: CameraDelegate? public let captureSession:AVCaptureSession public let inputCamera:AVCaptureDevice! - public let videoInput:AVCaptureDeviceInput! + public private(set) var videoInput:AVCaptureDeviceInput! public let videoOutput:AVCaptureVideoDataOutput! public var microphone:AVCaptureDevice? 
public var audioInput:AVCaptureDeviceInput? @@ -156,13 +183,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } captureSession.sessionPreset = sessionPreset - if let connections = videoOutput.connections as? [AVCaptureConnection] { - for connection in connections { - if(connection.isVideoMirroringSupported) { - connection.isVideoMirrored = (location == .frontFacingMirrored) - } - } - } + Camera.updateOrientation(location: location, videoOutput: videoOutput) captureSession.commitConfiguration() @@ -360,3 +381,18 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) } } + +private extension Camera { + static func updateOrientation(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput) { + if let connections = videoOutput.connections as? [AVCaptureConnection] { + for connection in connections { + if connection.isVideoMirroringSupported { + connection.isVideoMirrored = (location == .frontFacingMirrored) + } + if connection.isVideoOrientationSupported { + connection.videoOrientation = .portrait + } + } + } + } +} From bbdd2ba95533c8d73e11eaf1922065e2ef62f22f Mon Sep 17 00:00:00 2001 From: RoCry Date: Fri, 11 May 2018 15:01:05 +0800 Subject: [PATCH 075/332] Change target to 10.0 --- GPUImage2.podspec | 2 +- framework/GPUImage.xcodeproj/project.pbxproj | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/GPUImage2.podspec b/GPUImage2.podspec index a12aeb90..6ce15189 100644 --- a/GPUImage2.podspec +++ b/GPUImage2.podspec @@ -13,7 +13,7 @@ Pod::Spec.new do |s| s.requires_arc = true s.xcconfig = { 'CLANG_MODULES_AUTOLINK' => 'YES', 'OTHER_SWIFT_FLAGS' => "$(inherited) -DGLES"} - s.ios.deployment_target = '8.0' + s.ios.deployment_target = '10.0' s.ios.exclude_files = 'framework/Source/Mac', 'framework/Source/Linux', 'framework/Source/Operations/Shaders/ConvertedShaders_GL.swift' 
s.frameworks = ['OpenGLES', 'CoreMedia', 'QuartzCore', 'AVFoundation'] diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index f8378d7c..8bd09e64 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -1964,7 +1964,7 @@ DYLIB_INSTALL_NAME_BASE = "@rpath"; INFOPLIST_FILE = "$(SRCROOT)/GPUImage.xcodeproj/GPUImage_Info.plist"; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; OTHER_SWIFT_FLAGS = "-DDEBUG -DGLES"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.GPUImage; @@ -1995,7 +1995,7 @@ DYLIB_INSTALL_NAME_BASE = "@rpath"; INFOPLIST_FILE = "$(SRCROOT)/GPUImage.xcodeproj/GPUImage_Info.plist"; INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks"; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; OTHER_SWIFT_FLAGS = "-DGLES"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.GPUImage; From d98f70213ebe306130d7f44f81d638c5a684dd14 Mon Sep 17 00:00:00 2001 From: RoCry Date: Sun, 13 May 2018 14:42:53 +0800 Subject: [PATCH 076/332] Capture still photo, turn on stabilization --- framework/Source/iOS/Camera.swift | 39 +++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 31b537ab..2c8f41c5 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -48,12 +48,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer didSet { if oldValue == location { return } - let devicePosition = location.captureDevicePosition() - - guard let device = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).first(where: { - ($0 
as? AVCaptureDevice)?.position == devicePosition - }) as? AVCaptureDevice else { - fatalError("ERROR: Can't find video devices for \(devicePosition)") + guard let device = location.device() else { + fatalError("ERROR: Can't find video devices for \(location)") } do { @@ -92,6 +88,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } + public private(set) var photoOutput: AVCapturePhotoOutput? + public let targets = TargetContainer() public weak var delegate: CameraDelegate? public let captureSession:AVCaptureSession @@ -117,7 +115,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var captureSessionRestartAttempts = 0 - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true) throws { + public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil) throws { self.location = location self.captureAsYUV = captureAsYUV @@ -181,8 +179,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if (captureSession.canAddOutput(videoOutput)) { captureSession.addOutput(videoOutput) } + + if let photoOutput = photoOutput { + self.photoOutput = photoOutput + if (captureSession.canAddOutput(photoOutput)) { + captureSession.addOutput(photoOutput) + } + } + captureSession.sessionPreset = sessionPreset + let videoOutputConnection = videoOutput.connections.first as! 
AVCaptureConnection + if videoOutputConnection.isVideoStabilizationSupported { + videoOutputConnection.preferredVideoStabilizationMode = .standard + } + print("isVideoStabilizationSupported: \(videoOutputConnection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(videoOutputConnection.activeVideoStabilizationMode.rawValue)") + Camera.updateOrientation(location: location, videoOutput: videoOutput) captureSession.commitConfiguration() @@ -195,6 +207,19 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionDidStartRunning(note:)), name: NSNotification.Name.AVCaptureSessionDidStartRunning, object: nil) } + public func captureStillImage(delegate: AVCapturePhotoCaptureDelegate, settings: AVCapturePhotoSettings? = nil) { + guard let photoOutput = photoOutput else { + fatalError("didn't setup photo output") + } + + let photoSettings = settings ?? AVCapturePhotoSettings() + + photoSettings.isAutoStillImageStabilizationEnabled = photoOutput.isStillImageStabilizationSupported + + print("isStillImageStabilizationSupported: \(photoOutput.isStillImageStabilizationSupported), isStillImageStabilizationScene: \(photoOutput.isStillImageStabilizationScene)") + photoOutput.capturePhoto(with: photoSettings, delegate: delegate) + } + deinit { let captureSession = self.captureSession DispatchQueue.global().async { From 304a462b32941067d468e6a5e55a1e848cdceda5 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 14 May 2018 10:00:24 +0800 Subject: [PATCH 077/332] Add ResizeCrop --- framework/Source/Operations/ResizeCrop.swift | 50 ++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 framework/Source/Operations/ResizeCrop.swift diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift new file mode 100644 index 00000000..27703969 --- /dev/null +++ b/framework/Source/Operations/ResizeCrop.swift @@ 
-0,0 +1,50 @@ +// +// ResizeCrop.swift +// Alamofire +// +// Created by rocry on 5/14/18. +// + +open class ResizeCrop: BasicOperation { + public var cropSizeInPixels: Size? + + public init() { + super.init(fragmentShader:PassthroughFragmentShader, numberOfInputs:1) + } + + override open func renderFrame() { + let inputFramebuffer:Framebuffer = inputFramebuffers[0]! + let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) + + let finalCropSize:GLSize + let normalizedOffsetFromOrigin:Position + if let cropSize = cropSizeInPixels { + let glCropSize: GLSize + + let ratioW = cropSize.width / Float(inputSize.width) + let ratioH = cropSize.height / Float(inputSize.height) + if ratioW > ratioH { + glCropSize = GLSize(width: inputSize.width, height: GLint(Float(inputSize.width) * (cropSize.height / cropSize.width))) + } else { + glCropSize = GLSize(width: GLint(Float(inputSize.height) * (cropSize.width / cropSize.height)), height: inputSize.height) + } + + finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + normalizedOffsetFromOrigin = Position(Float(inputSize.width / 2 - finalCropSize.width / 2) / Float(inputSize.width), + Float(inputSize.height / 2 - finalCropSize.height / 2) / Float(inputSize.height)) + } else { + finalCropSize = inputSize + normalizedOffsetFromOrigin = Position.zero + } + let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) + + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) + + let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) + + 
renderFramebuffer.activateFramebufferForRendering() + clearFramebufferWithColor(backgroundColor) + renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) + releaseIncomingFramebuffers() + } +} From 9e4e0209eb2b8a8a559a9a771900ac5dfbeabaa3 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 14 May 2018 10:06:48 +0800 Subject: [PATCH 078/332] Remove isAutoStillImageStabilizationEnabled --- framework/Source/iOS/Camera.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 2c8f41c5..e4d450ad 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -214,7 +214,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let photoSettings = settings ?? AVCapturePhotoSettings() - photoSettings.isAutoStillImageStabilizationEnabled = photoOutput.isStillImageStabilizationSupported +// photoSettings.isAutoStillImageStabilizationEnabled = photoOutput.isStillImageStabilizationSupported print("isStillImageStabilizationSupported: \(photoOutput.isStillImageStabilizationSupported), isStillImageStabilizationScene: \(photoOutput.isStillImageStabilizationScene)") photoOutput.capturePhoto(with: photoSettings, delegate: delegate) From 9868434165eb9bbbb1f09f09984b42c7df51ad62 Mon Sep 17 00:00:00 2001 From: RoCry Date: Tue, 15 May 2018 19:00:11 +0800 Subject: [PATCH 079/332] Support set encodedJPEGImageCompressionQuality for PictureOutput --- framework/Source/iOS/PictureOutput.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 1db2110b..5ffc03d2 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -9,6 +9,7 @@ public enum PictureFileFormat { public class PictureOutput: 
ImageConsumer { public var encodedImageAvailableCallback:((Data) -> ())? public var encodedImageFormat:PictureFileFormat = .png + public var encodedJPEGImageCompressionQuality: CGFloat = 0.8 public var imageAvailableCallback:((UIImage) -> ())? public var onlyCaptureNextFrame:Bool = true public var keepImageAroundForSynchronousCapture:Bool = false @@ -81,7 +82,7 @@ public class PictureOutput: ImageConsumer { let imageData:Data switch encodedImageFormat { case .png: imageData = UIImagePNGRepresentation(image)! // TODO: Better error handling here - case .jpeg: imageData = UIImageJPEGRepresentation(image, 0.8)! // TODO: Be able to set image quality + case .jpeg: imageData = UIImageJPEGRepresentation(image, encodedJPEGImageCompressionQuality)! } imageCallback(imageData) From 0b8fa24a8c17337c356ed8bfce01f1525be5b325 Mon Sep 17 00:00:00 2001 From: RoCry Date: Wed, 16 May 2018 12:42:59 +0800 Subject: [PATCH 080/332] Update video stabilization with location --- framework/Source/iOS/Camera.swift | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index e4d450ad..ee2c4fe1 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -60,12 +60,13 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureSession.canAddInput(newVideoInput) { captureSession.addInput(newVideoInput) videoInput = newVideoInput + + Camera.updateVideoOutput(location: location, videoOutput: videoOutput) } else { + print("Can't add video input") captureSession.addInput(videoInput) } - Camera.updateOrientation(location: location, videoOutput: videoOutput) - captureSession.commitConfiguration() } catch let error { fatalError("ERROR: Could not init device: \(error)") @@ -189,13 +190,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.sessionPreset = sessionPreset - let 
videoOutputConnection = videoOutput.connections.first as! AVCaptureConnection - if videoOutputConnection.isVideoStabilizationSupported { - videoOutputConnection.preferredVideoStabilizationMode = .standard - } - print("isVideoStabilizationSupported: \(videoOutputConnection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(videoOutputConnection.activeVideoStabilizationMode.rawValue)") - - Camera.updateOrientation(location: location, videoOutput: videoOutput) + Camera.updateVideoOutput(location: location, videoOutput: videoOutput) captureSession.commitConfiguration() @@ -408,15 +403,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } private extension Camera { - static func updateOrientation(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput) { + static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput) { if let connections = videoOutput.connections as? [AVCaptureConnection] { for connection in connections { if connection.isVideoMirroringSupported { connection.isVideoMirrored = (location == .frontFacingMirrored) } + if connection.isVideoOrientationSupported { connection.videoOrientation = .portrait } + + if connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = .standard + } + + print("isVideoStabilizationSupported: \(connection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(connection.activeVideoStabilizationMode.rawValue)") } } } From 9e5f84238a3890cfe4961adcf4cc94aa07686b41 Mon Sep 17 00:00:00 2001 From: RoCry Date: Wed, 16 May 2018 16:37:37 +0800 Subject: [PATCH 081/332] Update inputCamera with location --- framework/Source/iOS/Camera.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index ee2c4fe1..57b5028a 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -58,6 +58,7 @@ public class 
Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.removeInput(videoInput) if captureSession.canAddInput(newVideoInput) { + inputCamera = device captureSession.addInput(newVideoInput) videoInput = newVideoInput @@ -94,7 +95,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public let targets = TargetContainer() public weak var delegate: CameraDelegate? public let captureSession:AVCaptureSession - public let inputCamera:AVCaptureDevice! + public private(set) var inputCamera:AVCaptureDevice! public private(set) var videoInput:AVCaptureDeviceInput! public let videoOutput:AVCaptureVideoDataOutput! public var microphone:AVCaptureDevice? From 3146dfd4f4a719fcd336a0ebadc9290fa4907d80 Mon Sep 17 00:00:00 2001 From: RoCry Date: Thu, 17 May 2018 14:12:33 +0800 Subject: [PATCH 082/332] Add GPUImageLogger --- framework/Source/OpenGLContext_Shared.swift | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index 6fcbf83d..b1229314 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -99,8 +99,18 @@ extension OpenGLContext { } } +public var GPUImageLogger: (String, StaticString, UInt, StaticString) -> () = { stringToPrint, file, line, function in + Swift.print("\(stringToPrint) --> \((String(describing:file) as NSString).lastPathComponent): \(function): \(line)") +} + @_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { #if DEBUG - print("\(stringToPrint) --> \((String(describing:file) as NSString).lastPathComponent): \(function): \(line)") + print(stringToPrint, file: file, line: line, function: function) #endif } + +@_semantics("sil.optimize.never") public func print(_ stringToPrint:String, file: StaticString = #file, line: UInt 
= #line, function: StaticString = #function) { + GPUImageLogger(stringToPrint, file, line, function) +} + + From f27d692ac60a2a515a415c7ca53c9e97d0f2e63b Mon Sep 17 00:00:00 2001 From: RoCry Date: Thu, 17 May 2018 21:24:28 +0800 Subject: [PATCH 083/332] Add cgImageAvailableCallback --- framework/Source/iOS/PictureOutput.swift | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 5ffc03d2..5e8e7da2 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -11,6 +11,7 @@ public class PictureOutput: ImageConsumer { public var encodedImageFormat:PictureFileFormat = .png public var encodedJPEGImageCompressionQuality: CGFloat = 0.8 public var imageAvailableCallback:((UIImage) -> ())? + public var cgImageAvailableCallback:((CGImage) -> ())? public var onlyCaptureNextFrame:Bool = true public var keepImageAroundForSynchronousCapture:Bool = false var storedFramebuffer:Framebuffer? 
@@ -63,6 +64,16 @@ public class PictureOutput: ImageConsumer { storedFramebuffer = framebuffer } + if let imageCallback = cgImageAvailableCallback { + let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) + + imageCallback(cgImageFromBytes) + + if onlyCaptureNextFrame { + cgImageAvailableCallback = nil + } + } + if let imageCallback = imageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) From 7e94f51f314352ffb92f80cedcd9e243ca667eb8 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 21 May 2018 21:18:34 +0800 Subject: [PATCH 084/332] Support AVCaptureMetadataOutputObjectsDelegate --- framework/Source/iOS/Camera.swift | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 57b5028a..7ee4a80c 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -117,7 +117,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var captureSessionRestartAttempts = 0 - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil) throws { + public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? 
= nil) throws { self.location = location self.captureAsYUV = captureAsYUV @@ -189,6 +189,16 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } + if let metadataDelegate = metadataDelegate { + let captureMetadataOutput = AVCaptureMetadataOutput() + if captureSession.canAddOutput(captureMetadataOutput) { + captureSession.addOutput(captureMetadataOutput) + + captureMetadataOutput.setMetadataObjectsDelegate(metadataDelegate, queue: cameraProcessingQueue) + captureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode] + } + } + captureSession.sessionPreset = sessionPreset Camera.updateVideoOutput(location: location, videoOutput: videoOutput) From d230bd06047c0675df16d394bf374a7c11137281 Mon Sep 17 00:00:00 2001 From: RoCry Date: Wed, 23 May 2018 17:20:40 +0800 Subject: [PATCH 085/332] Add debug logs --- framework/Source/BasicOperation.swift | 2 +- framework/Source/iOS/MovieInput.swift | 2 ++ framework/Source/iOS/MovieOutput.swift | 4 ++++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index dfcd0c07..0a5db657 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -78,7 +78,7 @@ open class BasicOperation: ImageProcessingOperation { } deinit { - //debugPrint("Deallocating operation: \(self)") + debugPrint("Deallocating operation: \(self)") } // MARK: - diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 4d815606..cc43ed4d 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -88,6 +88,8 @@ public class MovieInput: ImageSource { } deinit { + debugPrint("movie input deinit \(asset)") + self.movieFramebuffer?.unlock() self.cancel() diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index bef4aaa4..e151a65c 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ 
b/framework/Source/iOS/MovieOutput.swift @@ -58,6 +58,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var synchronizedEncodingDebug = false var totalFramesAppended:Int = 0 + deinit { + debugPrint("movie output deinit \(assetWriter.outputURL)") + } + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup let movieProcessingContext = OpenGLContext() From 1f6ed8f173ca90a0bf36e696d0cd114e899c8e0f Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 11 Jun 2018 22:31:43 +0800 Subject: [PATCH 086/332] Add missing downsamplingFactor for BilateralBlur --- framework/Source/Operations/BilateralBlur.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/Operations/BilateralBlur.swift b/framework/Source/Operations/BilateralBlur.swift index 015d2917..ca47c9d7 100644 --- a/framework/Source/Operations/BilateralBlur.swift +++ b/framework/Source/Operations/BilateralBlur.swift @@ -6,6 +6,7 @@ public class BilateralBlur: TwoStageOperation { public init() { super.init(vertexShader:BilateralBlurVertexShader, fragmentShader:BilateralBlurFragmentShader) + downsamplingFactor = 4.0 ({distanceNormalizationFactor = 1.0})() } } From 3fd2451bf56cf09a9fd026d2d2028935fe5d1dc1 Mon Sep 17 00:00:00 2001 From: RoCry Date: Tue, 26 Jun 2018 18:41:34 +0800 Subject: [PATCH 087/332] Add startProcessingCallback for MovieInput --- framework/Source/iOS/MovieInput.swift | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index cc43ed4d..1bb5f723 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -40,6 +40,7 @@ public class MovieInput: ImageSource { 
// Called after the video finishes. Not called when cancel() or pause() is called. public var completion: (() -> Void)? + public var startProcessingCallback: (() -> Void)? // Progress block of the video with a paramater value of 0-1. // Can be used to check video encoding progress. Not called from main thread. public var progress: ((Double) -> Void)? @@ -175,6 +176,12 @@ public class MovieInput: ImageSource { } @objc func beginReading() { + if let startProcessingCallback = startProcessingCallback { + DispatchQueue.main.sync { + startProcessingCallback() + } + } + let thread = Thread.current mach_timebase_info(&timebaseInfo) From 905c6dff9a95c0acf56b17c00faa97eb7de9ce04 Mon Sep 17 00:00:00 2001 From: RoCry Date: Thu, 2 Aug 2018 11:01:56 +0800 Subject: [PATCH 088/332] Add debug log for lifecycle --- framework/Source/OpenGLContext_Shared.swift | 2 +- framework/Source/iOS/Camera.swift | 4 ++++ framework/Source/iOS/MovieInput.swift | 2 ++ framework/Source/iOS/MovieOutput.swift | 3 +++ framework/Source/iOS/PictureOutput.swift | 2 ++ 5 files changed, 12 insertions(+), 1 deletion(-) diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index b1229314..d0f8b713 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -105,7 +105,7 @@ public var GPUImageLogger: (String, StaticString, UInt, StaticString) -> () = { @_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { #if DEBUG - print(stringToPrint, file: file, line: line, function: function) + print("[GPUImage] " + stringToPrint, file: file, line: line, function: function) #endif } diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 7ee4a80c..db5caa6e 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -118,6 +118,8 @@ public class Camera: 
NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var captureSessionRestartAttempts = 0 public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil) throws { + + debugPrint("camera init") self.location = location self.captureAsYUV = captureAsYUV @@ -227,6 +229,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } deinit { + debugPrint("camera deinit") + let captureSession = self.captureSession DispatchQueue.global().async { if (captureSession.isRunning) { diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 1bb5f723..5ae1bccb 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -74,6 +74,8 @@ public class MovieInput: ImageSource { // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? = nil) throws { + debugPrint("movie input init \(asset)") + self.asset = asset self.videoComposition = videoComposition self.playAtActualSpeed = playAtActualSpeed diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index e151a65c..736fee67 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -63,6 +63,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? 
= nil) throws { + + debugPrint("movie output init \(URL)") + imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup let movieProcessingContext = OpenGLContext() diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 5e8e7da2..5c73a6df 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -21,9 +21,11 @@ public class PictureOutput: ImageConsumer { var url:URL! public init() { + debugPrint("PictureOutput init") } deinit { + debugPrint("PictureOutput deinit") } public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { From 4b98f2846d69025760766797e4e5379968c36290 Mon Sep 17 00:00:00 2001 From: RoCry Date: Thu, 2 Aug 2018 15:39:02 +0800 Subject: [PATCH 089/332] Fix potential race condition for shader cache --- framework/Source/OpenGLContext_Shared.swift | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index d0f8b713..e7f9d5ae 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -19,13 +19,15 @@ public let sharedImageProcessingContext = OpenGLContext() extension OpenGLContext { public func programForVertexShader(_ vertexShader:String, fragmentShader:String) throws -> ShaderProgram { - let lookupKeyForShaderProgram = "V: \(vertexShader) - F: \(fragmentShader)" - if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { - return shaderFromCache - } else { - return try self.runOperationSynchronously{ + return try self.runOperationSynchronously{ + let lookupKeyForShaderProgram = "V: \(vertexShader) - F: \(fragmentShader)" + if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { +// debugPrint("load from cache: \(lookupKeyForShaderProgram)") + return shaderFromCache + } else { let program = try ShaderProgram(vertexShader:vertexShader, 
fragmentShader:fragmentShader) self.shaderCache[lookupKeyForShaderProgram] = program +// debugPrint("create cache: \(lookupKeyForShaderProgram)") return program } } From e14df6ecb78ef163cf18f69146c162c7302ad409 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 21 Sep 2018 16:12:45 +0800 Subject: [PATCH 090/332] improve(MovieInput): make sure completion will be called in all path --- framework/Source/iOS/MovieInput.swift | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 5ae1bccb..87a90d18 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -4,6 +4,10 @@ public protocol MovieInputDelegate: class { func didFinishMovie() } +enum MovieInputError: Error { + case cannotCreateAssetReader +} + public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false @@ -39,7 +43,7 @@ public class MovieInput: ImageSource { public var loop:Bool // Called after the video finishes. Not called when cancel() or pause() is called. - public var completion: (() -> Void)? + public var completion: ((Error?) -> Void)? public var startProcessingCallback: (() -> Void)? // Progress block of the video with a paramater value of 0-1. // Can be used to check video encoding progress. Not called from main thread. @@ -200,6 +204,7 @@ public class MovieInput: ImageSource { } guard let assetReader = self.createReader() else { + completion?(MovieInputError.cannotCreateAssetReader) return // A return statement in this frame will end thread execution. 
} @@ -207,12 +212,14 @@ public class MovieInput: ImageSource { try NSObject.catchException { guard assetReader.startReading() else { print("ERROR: Unable to start reading: \(String(describing: assetReader.error))") + self.completion?(assetReader.error) return } } } catch { print("ERROR: Unable to start reading: \(error)") + completion?(error) return } @@ -270,7 +277,7 @@ public class MovieInput: ImageSource { } else { self.delegate?.didFinishMovie() - self.completion?() + self.completion?(nil) self.synchronizedEncodingDebugPrint("MovieInput finished reading") self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") From d2a07b59adf13bbeb8b90643ed779c9cd85d68c3 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 17 Dec 2018 15:51:01 +0800 Subject: [PATCH 091/332] Export MovieInput.asset --- framework/Source/iOS/MovieInput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 87a90d18..2e0a0b94 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -27,7 +27,7 @@ public class MovieInput: ImageSource { } let yuvConversionShader:ShaderProgram - let asset:AVAsset + public let asset:AVAsset let videoComposition:AVVideoComposition? var playAtActualSpeed:Bool From bea11d5e33b0d892951d4af8c55b9cc56d2722a9 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 28 Dec 2018 16:19:13 +0800 Subject: [PATCH 092/332] chore: change frame buffer cache to public --- framework/Source/iOS/OpenGLContext.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index dec61982..662f381e 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -7,7 +7,7 @@ var imageProcessingShareGroup:EAGLSharegroup? 
= nil var dispatchQueKeyValueCounter = 81 public class OpenGLContext: SerialDispatch { - lazy var framebufferCache:FramebufferCache = { + public lazy var framebufferCache:FramebufferCache = { return FramebufferCache(context:self) }() var shaderCache:[String:ShaderProgram] = [:] From e70c5311bd65fc6a621a69ea84deed359dc18263 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 7 Jan 2019 02:11:11 +0800 Subject: [PATCH 093/332] improve: update access scope of OpenGLContext properties --- framework/Source/iOS/OpenGLContext.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index 662f381e..e490cbeb 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -7,14 +7,14 @@ var imageProcessingShareGroup:EAGLSharegroup? = nil var dispatchQueKeyValueCounter = 81 public class OpenGLContext: SerialDispatch { - public lazy var framebufferCache:FramebufferCache = { + public private(set) lazy var framebufferCache:FramebufferCache = { return FramebufferCache(context:self) }() var shaderCache:[String:ShaderProgram] = [:] public let standardImageVBO:GLuint var textureVBOs:[Rotation:GLuint] = [:] - let context:EAGLContext + public let context:EAGLContext lazy var passthroughShader:ShaderProgram = { return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} From e1855440701fc8f17c24c45ba2aa753622535402 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 7 Jan 2019 02:12:20 +0800 Subject: [PATCH 094/332] improve: add disableAttributeCache for ShaderProgram --- framework/Source/ShaderProgram.swift | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 7e2013bc..6b3f6dea 100755 --- a/framework/Source/ShaderProgram.swift +++ 
b/framework/Source/ShaderProgram.swift @@ -26,6 +26,7 @@ enum ShaderType { public class ShaderProgram { public var colorUniformsUseFourComponents = false + public static var disableAttributeCache: Bool = false let program:GLuint var vertexShader:GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets var fragmentShader:GLuint! @@ -74,7 +75,7 @@ public class ShaderProgram { // MARK: Attributes and uniforms public func attributeIndex(_ attribute:String) -> GLuint? { - if let attributeAddress = attributeAddresses[attribute] { + if let attributeAddress = attributeAddresses[attribute], !ShaderProgram.disableAttributeCache { return attributeAddress } else { var attributeAddress:GLint = -1 @@ -86,7 +87,9 @@ public class ShaderProgram { return nil } else { glEnableVertexAttribArray(GLuint(attributeAddress)) - attributeAddresses[attribute] = GLuint(attributeAddress) + if !ShaderProgram.disableAttributeCache { + attributeAddresses[attribute] = GLuint(attributeAddress) + } return GLuint(attributeAddress) } } From 5a6acb5329b7b533c842ce2facea1461718253b5 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 11 Jan 2019 13:54:03 +0800 Subject: [PATCH 095/332] add capability to reduce sticky video --- framework/Source/iOS/Camera.swift | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index db5caa6e..b0b75d47 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -101,6 +101,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var microphone:AVCaptureDevice? public var audioInput:AVCaptureDeviceInput? public var audioOutput:AVCaptureAudioDataOutput? 
+ public var dontDropFrames: Bool = false var supportsFullYUVRange:Bool = false let captureAsYUV:Bool @@ -266,7 +267,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer return } - guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } + let notFrameDrop = dontDropFrames + + guard notFrameDrop || (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } let startTime = CFAbsoluteTimeGetCurrent() @@ -355,7 +358,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.framesSinceLastCheck += 1 } - self.frameRenderingSemaphore.signal() + if !notFrameDrop { + self.frameRenderingSemaphore.signal() + } } } From efa5c3f11d741c8376ca5ad3c4f68cc698ac4a1c Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 2 Nov 2018 14:26:31 +0800 Subject: [PATCH 096/332] feat(stabilization): support stabilization mode setting for different camera --- framework/Source/iOS/Camera.swift | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index b0b75d47..219a7130 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -61,8 +61,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer inputCamera = device captureSession.addInput(newVideoInput) videoInput = newVideoInput - - Camera.updateVideoOutput(location: location, videoOutput: videoOutput) + let stableMode = (location == .backFacing ? backCaemraStableMode : frontCameraStableMode) + Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) } else { print("Can't add video input") captureSession.addInput(videoInput) @@ -102,6 +102,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioInput:AVCaptureDeviceInput? 
public var audioOutput:AVCaptureAudioDataOutput? public var dontDropFrames: Bool = false + public var backCaemraStableMode: AVCaptureVideoStabilizationMode = .standard + public var frontCameraStableMode: AVCaptureVideoStabilizationMode = .standard var supportsFullYUVRange:Bool = false let captureAsYUV:Bool @@ -423,7 +425,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } private extension Camera { - static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput) { + static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput, stableMode: AVCaptureVideoStabilizationMode = .standard) { if let connections = videoOutput.connections as? [AVCaptureConnection] { for connection in connections { if connection.isVideoMirroringSupported { @@ -435,7 +437,7 @@ private extension Camera { } if connection.isVideoStabilizationSupported { - connection.preferredVideoStabilizationMode = .standard + connection.preferredVideoStabilizationMode = stableMode } print("isVideoStabilizationSupported: \(connection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(connection.activeVideoStabilizationMode.rawValue)") From 92617a241204410714a2a1e55b329c4948774860 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 12 Jan 2019 02:13:09 +0800 Subject: [PATCH 097/332] add keepLastPixelBuffer to get last recorded frame pixel buffer --- framework/Source/iOS/MovieOutput.swift | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 736fee67..34bbcf90 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -47,7 +47,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } } - var pixelBuffer:CVPixelBuffer? 
= nil + public private(set) var pixelBuffer:CVPixelBuffer? = nil + let keepLastPixelBuffer: Bool var renderFramebuffer:Framebuffer! var audioSettings:[String:Any]? = nil @@ -62,7 +63,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { debugPrint("movie output deinit \(assetWriter.outputURL)") } - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil, keepLastPixelBuffer: Bool = false) throws { debugPrint("movie output init \(URL)") @@ -119,6 +120,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.audioSourceFormatHint = audioSourceFormatHint self.movieProcessingContext = movieProcessingContext + self.keepLastPixelBuffer = keepLastPixelBuffer } public func startRecording(_ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? 
= nil) { @@ -226,6 +228,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { usleep(100000) // 0.1 seconds } + if self.keepLastPixelBuffer { + self.pixelBuffer = nil + } let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { print("WARNING: Unable to create pixel buffer, dropping frame") @@ -252,7 +257,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - self.pixelBuffer = nil + if !self.keepLastPixelBuffer { + self.pixelBuffer = nil + } sharedImageProcessingContext.runOperationAsynchronously { framebuffer.unlock() From 8f55ca9238b99c36969eb6ad82f7fd5fd2b71b93 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 16 Jan 2019 18:27:15 +0800 Subject: [PATCH 098/332] add raw sampleBuffer reading and writing to MovieInput and MovieOutput --- framework/Source/iOS/MovieInput.swift | 6 +++ framework/Source/iOS/MovieOutput.swift | 67 ++++++++++++++++++++++++++ 2 files changed, 73 insertions(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 2e0a0b94..d0c90d74 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -2,6 +2,7 @@ import AVFoundation public protocol MovieInputDelegate: class { func didFinishMovie() + func didReadVideoFrame(_ sampleBuffer: CMSampleBuffer) } enum MovieInputError: Error { @@ -299,6 +300,11 @@ public class MovieInput: ImageSource { return } + if delegate != nil { + sharedImageProcessingContext.runOperationSynchronously{ [weak self] in + self?.delegate?.didReadVideoFrame(sampleBuffer) + } + } self.synchronizedEncodingDebugPrint("Process frame input") diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 34bbcf90..d9e3fce4 
100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -307,6 +307,73 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + // MARK: Append buffer directly from CMSampleBuffer + public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + let work = { + defer { + if(shouldInvalidateSampleWhenDone) { + CMSampleBufferInvalidate(sampleBuffer) + } + } + + guard self.isRecording, + self.assetWriter.status == .writing, + !self.videoEncodingIsFinished else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } + + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard (frameTime != self.previousFrameTime) else { return } + + if (self.previousFrameTime == nil) { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + self.assetWriter.startSession(atSourceTime: frameTime) + } + + self.previousFrameTime = frameTime + + guard (self.assetWriterVideoInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { + print("Had to drop a frame at time \(frameTime)") + return + } + + guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") + return + } + + while(!self.assetWriterVideoInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.videoEncodingIsFinished) { + self.synchronizedEncodingDebugPrint("Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. 
+ usleep(100000) // 0.1 seconds + } + + do { + self.synchronizedEncodingDebugPrint("Process frame output") + + try NSObject.catchException { + if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime:frameTime)) { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + } + } + } + catch { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") + } + } + + if(self.encodingLiveVideo) { + movieProcessingContext.runOperationAsynchronously(work) + } + else { + work() + } + } + // MARK: - // MARK: Audio support From 115d595af5cd21017020aac012425cdae7e1bbb0 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 16 Jan 2019 18:28:09 +0800 Subject: [PATCH 099/332] output sample buffer without frame dropping --- framework/Source/iOS/Camera.swift | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 219a7130..644a27d5 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -2,6 +2,16 @@ import Foundation import AVFoundation public protocol CameraDelegate: class { + /// Output original unprocessed sample buffer on AVCaptureDataOutput queue WITHOUT frame drops. + /// + /// - Parameters: + /// - sampleBuffer: original sample buffer + /// It should be very lightweight and delay less than 1/FPS secons. + func didCaptureBufferOnOutputQueue(_ sampleBuffer: CMSampleBuffer) + + /// Output original unprocessed sample buffer on sharedImageProcessing queue WITH frame drops if needed. 
+ /// + /// - Parameter sampleBuffer: original sample buffer func didCaptureBuffer(_ sampleBuffer: CMSampleBuffer) } public enum PhysicalCameraLocation { @@ -268,6 +278,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.processAudioSampleBuffer(sampleBuffer) return } + + delegate?.didCaptureBufferOnOutputQueue(sampleBuffer) let notFrameDrop = dontDropFrames From 59f208dfdc0aad4eecc92faa34c540c6258a1c29 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 19 Jan 2019 02:04:09 +0800 Subject: [PATCH 100/332] fix stabilization mode setting not working --- framework/Source/iOS/Camera.swift | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 644a27d5..c03ea19e 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -71,8 +71,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer inputCamera = device captureSession.addInput(newVideoInput) videoInput = newVideoInput - let stableMode = (location == .backFacing ? backCaemraStableMode : frontCameraStableMode) - Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) + configureStabilization() } else { print("Can't add video input") captureSession.addInput(videoInput) @@ -112,8 +111,20 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioInput:AVCaptureDeviceInput? public var audioOutput:AVCaptureAudioDataOutput? 
public var dontDropFrames: Bool = false - public var backCaemraStableMode: AVCaptureVideoStabilizationMode = .standard - public var frontCameraStableMode: AVCaptureVideoStabilizationMode = .standard + public var backCaemraStableMode: AVCaptureVideoStabilizationMode = .standard { + didSet { + if location == .backFacing { + configureStabilization() + } + } + } + public var frontCameraStableMode: AVCaptureVideoStabilizationMode = .standard { + didSet { + if location != .backFacing { + configureStabilization() + } + } + } var supportsFullYUVRange:Bool = false let captureAsYUV:Bool @@ -241,6 +252,11 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer photoOutput.capturePhoto(with: photoSettings, delegate: delegate) } + func configureStabilization() { + let stableMode = (location == .backFacing ? backCaemraStableMode : frontCameraStableMode) + Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) + } + deinit { debugPrint("camera deinit") From f70fdd7c1a9a0562e71a7d53092b4685b64459cd Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 19 Jan 2019 02:43:59 +0800 Subject: [PATCH 101/332] add cancel writing to MovieOutput --- framework/Source/iOS/MovieOutput.swift | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index d9e3fce4..cfd93f14 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -192,6 +192,25 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + public func cancelRecording(_ completionCallback:(() -> Void)? 
= nil) { + movieProcessingContext.runOperationAsynchronously{ + guard self.isRecording, + self.assetWriter.status == .writing else { + completionCallback?() + return + } + + self.audioEncodingIsFinished = false + self.videoEncodingIsFinished = false + + self.isRecording = false + + self.assetWriter.cancelWriting() + completionCallback?() + self.synchronizedEncodingDebugPrint("MovieOutput cancel writing") + } + } + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); From ab8d31f290930fc5b6dceabb1f98214e50567097 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 20 Jan 2019 18:07:11 +0800 Subject: [PATCH 102/332] Add drop first few frames for MovieOutput --- framework/Source/iOS/MovieOutput.swift | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index cfd93f14..89a383ce 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -48,6 +48,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } public private(set) var pixelBuffer:CVPixelBuffer? = nil + public var dropFirstFrames: Int = 0 let keepLastPixelBuffer: Bool var renderFramebuffer:Framebuffer! @@ -215,6 +216,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { glFinish(); let work = { + // Discard first n frames + if self.dropFirstFrames > 0 { + self.dropFirstFrames -= 1 + self.synchronizedEncodingDebugPrint("Drop one frame. 
Left dropFirstFrames:\(self.dropFirstFrames)") + return + } + guard self.isRecording, self.assetWriter.status == .writing, !self.videoEncodingIsFinished else { From 169e66bff67636744fa74aa5de7e8744977bc1b8 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 25 Jan 2019 01:56:26 +0800 Subject: [PATCH 103/332] add more debug log and fix transcoding might missing first audio sample buffer --- framework/Source/iOS/MovieInput.swift | 5 ++--- framework/Source/iOS/MovieOutput.swift | 29 +++++++++++++++++++++----- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index d0c90d74..77614372 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -306,10 +306,9 @@ public class MovieInput: ImageSource { } } - self.synchronizedEncodingDebugPrint("Process frame input") - var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical + self.synchronizedEncodingDebugPrint("Process frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") self.currentTime = currentSampleTime @@ -363,7 +362,7 @@ public class MovieInput: ImageSource { return } - self.synchronizedEncodingDebugPrint("Process audio sample input") + self.synchronizedEncodingDebugPrint("Process audio sample input. 
Time:\(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))") self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: true) } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 89a383ce..f8706e0b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -47,6 +47,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } } + var pendingAudioBuffers = [CMSampleBuffer]() public private(set) var pixelBuffer:CVPixelBuffer? = nil public var dropFirstFrames: Int = 0 let keepLastPixelBuffer: Bool @@ -184,6 +185,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // the session's samples (that is, no samples will be edited out at the end)." self.assetWriter.endSession(atSourceTime: lastFrame) } + self.pendingAudioBuffers.removeAll() self.assetWriter.finishWriting { completionCallback?() @@ -205,6 +207,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.videoEncodingIsFinished = false self.isRecording = false + self.pendingAudioBuffers.removeAll() self.assetWriter.cancelWriting() completionCallback?() @@ -267,7 +270,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - self.synchronizedEncodingDebugPrint("Process frame output") + self.synchronizedEncodingDebugPrint("Process frame output. 
Time:\(CMTimeGetSeconds(frameTime))") try NSObject.catchException { if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { @@ -413,8 +416,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let work = { + var shouldInvalidate = shouldInvalidateSampleWhenDone defer { - if(shouldInvalidateSampleWhenDone) { + if(shouldInvalidate) { CMSampleBufferInvalidate(sampleBuffer) } } @@ -439,12 +443,22 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { usleep(100000) } - self.synchronizedEncodingDebugPrint("Process audio sample output") + self.pendingAudioBuffers.append(sampleBuffer) + guard self.previousFrameTime != nil else { + self.synchronizedEncodingDebugPrint("Add audio sample to pending queue but first video frame is not ready yet. Time:\(CMTimeGetSeconds(currentSampleTime))") + shouldInvalidate = false + return + } + + self.synchronizedEncodingDebugPrint("Process audio sample output. 
Time:\(CMTimeGetSeconds(currentSampleTime))") do { try NSObject.catchException { - if (!assetWriterAudioInput.append(sampleBuffer)) { - print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + while self.pendingAudioBuffers.count > 0 { + let audioBuffer = self.pendingAudioBuffers.removeFirst() + if (!assetWriterAudioInput.append(audioBuffer)) { + print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + } } } } @@ -457,6 +471,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { movieProcessingContext.runOperationAsynchronously(work) } else { + // Process pending audio buffers at first + while pendingAudioBuffers.count > 0 { + let audioBuffer = pendingAudioBuffers.removeFirst() + processAudioBuffer(audioBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + } work() } } From 8ba3aa1f9217c7b2e0c03eaae21ff67981889dc7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 31 Jan 2019 10:51:02 +0800 Subject: [PATCH 104/332] fix possible MovieOutput cannot stop when cancel recording --- framework/Source/iOS/MovieOutput.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index f8706e0b..412bf249 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -203,8 +203,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } - self.audioEncodingIsFinished = false - self.videoEncodingIsFinished = false + self.audioEncodingIsFinished = true + self.videoEncodingIsFinished = true self.isRecording = false self.pendingAudioBuffers.removeAll() From 376b52929816591fcaa7a0d813f1827e25770bd7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 4 Feb 2019 00:48:35 +0800 Subject: [PATCH 105/332] support playrate for MovieInput --- framework/Source/iOS/MovieInput.swift | 12 +++++++----- 1 file changed, 7 
insertions(+), 5 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 77614372..9e6d1d0a 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -42,6 +42,7 @@ public class MovieInput: ImageSource { private(set) public var currentTime:CMTime? public var loop:Bool + public var playrate:Double // Called after the video finishes. Not called when cancel() or pause() is called. public var completion: ((Error?) -> Void)? @@ -78,21 +79,22 @@ public class MovieInput: ImageSource { public var framebufferUserInfo:[AnyHashable:Any]? // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? = nil) throws { + public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, playrate:Double = 1.0, audioSettings:[String:Any]? = nil) throws { debugPrint("movie input init \(asset)") self.asset = asset self.videoComposition = videoComposition self.playAtActualSpeed = playAtActualSpeed self.loop = loop + self.playrate = playrate self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} self.audioSettings = audioSettings } - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? = nil) throws { + public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false, playrate: Double = 1.0, audioSettings:[String:Any]? 
= nil) throws { let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop, audioSettings:audioSettings) + try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop, playrate:playrate, audioSettings:audioSettings) } deinit { @@ -308,7 +310,7 @@ public class MovieInput: ImageSource { var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical - self.synchronizedEncodingDebugPrint("Process frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") + self.synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") self.currentTime = currentSampleTime @@ -319,7 +321,7 @@ public class MovieInput: ImageSource { } if (self.playAtActualSpeed) { - let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000) + let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000 / playrate) let currentActualTime = DispatchTime.now() if(self.actualStartTime == nil) { self.actualStartTime = currentActualTime } From da25233b155df2469c7e0b063e6a45db9cfcc520 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 4 Feb 2019 11:10:48 +0800 Subject: [PATCH 106/332] change location.device() to public --- framework/Source/iOS/Camera.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index c03ea19e..76fe3a48 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -36,7 +36,7 @@ public enum PhysicalCameraLocation { } } - func device() -> AVCaptureDevice? { + public func device() -> AVCaptureDevice? 
{ let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo) for case let device as AVCaptureDevice in devices! { if (device.position == self.captureDevicePosition()) { From 25a224b13c63b161a9198630d6034c80f2b382d2 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 19 Feb 2019 13:31:04 +0800 Subject: [PATCH 107/332] fix typo --- framework/Source/iOS/Camera.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 76fe3a48..796bdf94 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -111,7 +111,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioInput:AVCaptureDeviceInput? public var audioOutput:AVCaptureAudioDataOutput? public var dontDropFrames: Bool = false - public var backCaemraStableMode: AVCaptureVideoStabilizationMode = .standard { + public var backCameraStableMode: AVCaptureVideoStabilizationMode = .standard { didSet { if location == .backFacing { configureStabilization() @@ -253,7 +253,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } func configureStabilization() { - let stableMode = (location == .backFacing ? backCaemraStableMode : frontCameraStableMode) + let stableMode = (location == .backFacing ? 
backCameraStableMode : frontCameraStableMode) Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) } From 9fc7079a15726fc042f99008a8c9e7df5e0efd5f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 19 Feb 2019 22:31:39 +0800 Subject: [PATCH 108/332] add MoviePlayer and use AVPlayer --- framework/Source/iOS/MoviePlayer.swift | 347 +++++++++++++++++++++++++ 1 file changed, 347 insertions(+) create mode 100644 framework/Source/iOS/MoviePlayer.swift diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift new file mode 100644 index 00000000..96f116f5 --- /dev/null +++ b/framework/Source/iOS/MoviePlayer.swift @@ -0,0 +1,347 @@ +// +// MoviePlayer.swift +// DayCam +// +// Created by 陈品霖 on 2019/1/30. +// Copyright © 2019 rocry. All rights reserved. +// +import AVFoundation + +public protocol MoviePlayerDelegate: class { + func moviePlayerDidReadPixelBuffer(_ pixelBuffer: CVPixelBuffer, time: TimeInterval) +} + +public typealias MoviePlayerTimeObserverCallback = (TimeInterval) -> Void + +public struct MoviePlayerTimeObserver { + let targetTime: TimeInterval + let callback: MoviePlayerTimeObserverCallback + let observerID: String + init(targetTime: TimeInterval, callback: @escaping MoviePlayerTimeObserverCallback) { + self.targetTime = targetTime + self.callback = callback + observerID = UUID.init().uuidString + } +} + +public class MoviePlayer: ImageSource { + public let targets = TargetContainer() + public var runBenchmark = false + public var logEnabled = false + public weak var delegate: MoviePlayerDelegate? + public let asset: AVAsset + public let player: AVPlayer + public var isPlaying = false + + public var startTime: TimeInterval? + public var endTime: TimeInterval? + public var loop: Bool + + let playerItem: AVPlayerItem + let videoOutput: AVPlayerItemVideoOutput + var displayLink: CADisplayLink? 
+ + let yuvConversionShader: ShaderProgram + + var totalTimeObservers = [MoviePlayerTimeObserver]() + var timeObserversQueue = [MoviePlayerTimeObserver]() + + var timebaseInfo = mach_timebase_info_data_t() + var totalFramesSent = 0 + var totalFrameTime: Double = 0.0 + public var playrate: Float = 1.0 { + didSet { + player.rate = playrate + } + } + public var isMuted: Bool = false { + didSet { + player.isMuted = isMuted + } + } + + var movieFramebuffer: Framebuffer? + var framebufferUserInfo: [AnyHashable:Any]? + var observations = [NSKeyValueObservation]() + + public init(asset: AVAsset, loop: Bool = false) throws { + print("movie player init \(asset)") + self.asset = asset + self.loop = loop + self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { + try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), + fragmentShader: YUVConversionFullRangeFragmentShader) + } + + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + + playerItem = AVPlayerItem(asset: asset) + playerItem.add(videoOutput) + playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed + player = AVPlayer(playerItem: playerItem) + _setupObservers() + } + + public convenience init(url: URL, loop: Bool = false) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let inputAsset = AVURLAsset(url: url, options: inputOptions) + try self.init(asset: inputAsset, loop: loop) + } + + deinit { + debugPrint("movie player deinit \(asset)") + pause() + movieFramebuffer?.unlock() + observations.forEach { $0.invalidate() } + NotificationCenter.default.removeObserver(self) + } + + // MARK: - + // MARK: Playback control + + public func start() { + isPlaying = true + debugPrint("movie player start \(asset)") + if displayLink != nil 
{ + displayLink?.remove(from: RunLoop.main, forMode: .commonModes) + } + displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) + displayLink?.add(to: RunLoop.main, forMode: .commonModes) + timeObserversQueue.removeAll() + if let endTime = endTime { + let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in + if self?.loop == true { + self?.pause() + self?.start() + } else { + self?.pause() + } + } + timeObserversQueue.append(endTimeObserver) + } + for observer in totalTimeObservers { + guard observer.targetTime >= startTime ?? 0 else { + break + } + timeObserversQueue.append(observer) + } + seekToTime(startTime ?? 0, shouldPlayAfterSeeking: true) + } + + public func pause() { + isPlaying = false + debugPrint("movie player pause \(asset)") + player.pause() + timeObserversQueue.removeAll() + displayLink?.remove(from: RunLoop.current, forMode: .commonModes) + displayLink?.invalidate() + displayLink = nil + } + + public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { + player.seek(to: CMTime(seconds: time, preferredTimescale: 600)) { [weak self] success in + print("movie player did seek to time:\(time) success:\(success)") + guard let self = self else { return } + if shouldPlayAfterSeeking { + self.player.rate = self.playrate + } + } + } + + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + // Not needed for movie inputs + } + + func transmitPreviousFrame() { + sharedImageProcessingContext.runOperationAsynchronously { + if let movieFramebuffer = self.movieFramebuffer { + self.updateTargetsWithFramebuffer(movieFramebuffer) + } + } + } + + public func addTimeObserver(seconds: TimeInterval, callback: @escaping MoviePlayerTimeObserverCallback) -> MoviePlayerTimeObserver { + let timeObserver = MoviePlayerTimeObserver(targetTime: seconds, callback: callback) + totalTimeObservers.append(timeObserver) + totalTimeObservers = totalTimeObservers.sorted { (lhs, rhs) in + 
return lhs.targetTime > rhs.targetTime + } + return timeObserver + } + + public func removeTimeObserver(timeObserver: MoviePlayerTimeObserver) { + totalTimeObservers.removeAll { (observer) -> Bool in + return observer.observerID == timeObserver.observerID + } + timeObserversQueue.removeAll { (observer) -> Bool in + return observer.observerID == timeObserver.observerID + } + } +} + +private extension MoviePlayer { + // MARK: - + // MARK: Thread configuration + + func nanosToAbs(_ nanos: UInt64) -> UInt64 { + return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer) + } + + func _setupObservers() { + observations.append(player.observe(\AVPlayer.rate) { [weak self] _, _ in + self?.playerRateDidChange() + }) + observations.append(player.observe(\AVPlayer.status) { [weak self] _, _ in + self?.playerStatusDidChange() + }) + NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + } + + func playerRateDidChange() { + print("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") + resumeIfNeeded() + } + + func playerStatusDidChange() { + print("status change to:\(player.status.rawValue) asset:\(asset)") + resumeIfNeeded() + } + + func resumeIfNeeded() { + guard player.status == .readyToPlay && isPlaying == true && player.rate != playrate else { return } + player.rate = playrate + } + + // MARK: - + // MARK: Internal processing functions + + func process(movieFrame: CVPixelBuffer, with sampleTime: CMTime) { + delegate?.moviePlayerDidReadPixelBuffer(movieFrame, time: CMTimeGetSeconds(sampleTime)) + + let bufferHeight = CVPixelBufferGetHeight(movieFrame) + let bufferWidth = CVPixelBufferGetWidth(movieFrame) + CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + + let conversionMatrix = 
colorConversionMatrix601FullRangeDefault + + let startTime = CFAbsoluteTimeGetCurrent() + + var luminanceGLTexture: CVOpenGLESTexture? + + glActiveTexture(GLenum(GL_TEXTURE0)) + + let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) + + if(luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil) { + print("Could not create LuminanceGLTexture") + return + } + + let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) + + glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + + let luminanceFramebuffer: Framebuffer + do { + luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return + } + + var chrominanceGLTexture: CVOpenGLESTexture? 
+ + glActiveTexture(GLenum(GL_TEXTURE1)) + + let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) + + if(chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil) { + print("Could not create ChrominanceGLTexture") + return + } + + let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) + + glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + + let chrominanceFramebuffer: Framebuffer + do { + chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, + orientation: .portrait, + size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), + textureOnly: true, + overriddenTexture: chrominanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return + } + + movieFramebuffer?.unlock() + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) + framebuffer.lock() + + convertYUVToRGB(shader: yuvConversionShader, + luminanceFramebuffer: luminanceFramebuffer, + chrominanceFramebuffer: chrominanceFramebuffer, + resultFramebuffer: framebuffer, + colorConversionMatrix: conversionMatrix) + CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + + framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(sampleTime)) + framebuffer.userInfo = framebufferUserInfo + 
movieFramebuffer = framebuffer + + updateTargetsWithFramebuffer(framebuffer) + + if(runBenchmark || logEnabled) { + totalFramesSent += 1 + } + + if runBenchmark { + let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) + totalFrameTime += currentFrameTime + print("Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") + print("Current frame time :\(1000.0 * currentFrameTime) ms") + } + } + + @objc func displayLinkCallback(displayLink: CADisplayLink) { + sharedImageProcessingContext.runOperationAsynchronously { + let currentTime = self.player.currentTime() + if self.videoOutput.hasNewPixelBuffer(forItemTime: currentTime) { + guard let pixelBuffer = self.videoOutput.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil) else { + print("Failed to copy pixel buffer at time:\(currentTime)") + return + } + self._notifyTimeObserver(with: currentTime) + self.process(movieFrame: pixelBuffer, with: currentTime) + } + } + } + + @objc func playerDidPlayToEnd(notification: Notification) { + guard loop && isPlaying && (endTime == nil || player.currentTime() == playerItem.asset.duration) else { return } + start() + } + + @objc func playerStalled(notification: Notification) { + print("player was stalled. 
notification:\(notification)") + } + + func _notifyTimeObserver(with sampleTime: CMTime) { + let currentTime = CMTimeGetSeconds(sampleTime) + while let lastObserver = timeObserversQueue.last, lastObserver.targetTime <= currentTime { + timeObserversQueue.removeLast() + DispatchQueue.main.async { + lastObserver.callback(currentTime) + } + } + } +} From dabd4d157a5f8e57082c66a320061e175b2072c1 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 21 Feb 2019 22:22:24 +0800 Subject: [PATCH 109/332] tweak(movieinput): MovieInput support specifying duration of time range --- framework/Source/iOS/MovieInput.swift | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 9e6d1d0a..d6e4ff51 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -34,8 +34,12 @@ public class MovieInput: ImageSource { // Time in the video where it should start. var requestedStartTime:CMTime? + // Time in the video where it should end. + var requestedDuration:CMTime? // Time in the video where it started. var startTime:CMTime? + // Duration of the video from startTime. + var durationFromStart:CMTime? // Time according to device clock when the video started. var actualStartTime:DispatchTime? // Last sample time that played. @@ -110,8 +114,9 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control - public func start(atTime: CMTime) { + public func start(atTime: CMTime, duration: CMTime? 
= nil) { self.requestedStartTime = atTime + requestedDuration = duration self.start() } @@ -170,10 +175,16 @@ public class MovieInput: ImageSource { } self.startTime = self.requestedStartTime + self.durationFromStart = self.requestedDuration if let requestedStartTime = self.requestedStartTime { - assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) + if let requestedDuration = self.requestedDuration, requestedDuration.seconds > 0, CMTimeAdd(requestedStartTime, requestedDuration) <= asset.duration { + assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: requestedDuration) + } else { + assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) + } } self.requestedStartTime = nil + self.requestedDuration = nil self.currentTime = nil self.actualStartTime = nil @@ -317,7 +328,11 @@ public class MovieInput: ImageSource { if let startTime = self.startTime { // Make sure our samples start at kCMTimeZero if the video was started midway. 
currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) - duration = CMTimeSubtract(duration, startTime) + if let durationFromStart = self.durationFromStart, durationFromStart.seconds > 0, CMTimeAdd(startTime, durationFromStart) <= duration { + duration = durationFromStart + } else { + duration = CMTimeSubtract(duration, startTime) + } } if (self.playAtActualSpeed) { From a4452e47f90969fb93eb03fdee95ff44073bce82 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 22 Feb 2019 01:34:25 +0800 Subject: [PATCH 110/332] chore: debug print --- framework/Source/iOS/MoviePlayer.swift | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 96f116f5..bc4001c6 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -65,7 +65,7 @@ public class MoviePlayer: ImageSource { var observations = [NSKeyValueObservation]() public init(asset: AVAsset, loop: Bool = false) throws { - print("movie player init \(asset)") + debugPrint("movie player init \(asset)") self.asset = asset self.loop = loop self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { @@ -201,12 +201,12 @@ private extension MoviePlayer { } func playerRateDidChange() { - print("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") + debugPrint("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") resumeIfNeeded() } func playerStatusDidChange() { - print("status change to:\(player.status.rawValue) asset:\(asset)") + debugPrint("status change to:\(player.status.rawValue) asset:\(asset)") resumeIfNeeded() } From a6f254a43970827eed8ccc24b4582398a4778f4d Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 22 Feb 2019 19:11:13 +0800 Subject: [PATCH 111/332] improve MoviePlayer seeking and looping --- framework/Source/iOS/MoviePlayer.swift | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) 
diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index bc4001c6..a5e88b7f 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -92,7 +92,7 @@ public class MoviePlayer: ImageSource { deinit { debugPrint("movie player deinit \(asset)") - pause() + stop() movieFramebuffer?.unlock() observations.forEach { $0.invalidate() } NotificationCenter.default.removeObserver(self) @@ -104,11 +104,10 @@ public class MoviePlayer: ImageSource { public func start() { isPlaying = true debugPrint("movie player start \(asset)") - if displayLink != nil { - displayLink?.remove(from: RunLoop.main, forMode: .commonModes) + if displayLink == nil { + displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) + displayLink?.add(to: RunLoop.main, forMode: .commonModes) } - displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) - displayLink?.add(to: RunLoop.main, forMode: .commonModes) timeObserversQueue.removeAll() if let endTime = endTime { let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in @@ -130,10 +129,20 @@ public class MoviePlayer: ImageSource { seekToTime(startTime ?? 
0, shouldPlayAfterSeeking: true) } + public func resume() { + isPlaying = true + player.rate = playrate + } + public func pause() { isPlaying = false debugPrint("movie player pause \(asset)") player.pause() + } + + public func stop() { + pause() + debugPrint("movie player stop \(asset)") timeObserversQueue.removeAll() displayLink?.remove(from: RunLoop.current, forMode: .commonModes) displayLink?.invalidate() @@ -141,10 +150,11 @@ public class MoviePlayer: ImageSource { } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { - player.seek(to: CMTime(seconds: time, preferredTimescale: 600)) { [weak self] success in + player.seek(to: CMTime(seconds: time, preferredTimescale: 600), toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero) { [weak self] success in print("movie player did seek to time:\(time) success:\(success)") guard let self = self else { return } if shouldPlayAfterSeeking { + self.isPlaying = true self.player.rate = self.playrate } } From 8632070ba5032155f9c9107e11772896030b4477 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 23 Feb 2019 17:05:47 +0800 Subject: [PATCH 112/332] MovieInput support trimming duration --- framework/Source/iOS/MovieInput.swift | 35 +++++++++++++++------------ 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index d6e4ff51..530d5630 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -32,14 +32,14 @@ public class MovieInput: ImageSource { let videoComposition:AVVideoComposition? var playAtActualSpeed:Bool - // Time in the video where it should start. + // Time in the video where it should start. It will be reset when looping. var requestedStartTime:CMTime? - // Time in the video where it should end. - var requestedDuration:CMTime? + // Time in the video where it should start for trimmed start. + var trimmedStartTime:CMTime? // Time in the video where it started. 
var startTime:CMTime? - // Duration of the video from startTime. - var durationFromStart:CMTime? + // Duration of the video from startTime for trimming. + var trimmedDuration:CMTime? // Time according to device clock when the video started. var actualStartTime:DispatchTime? // Last sample time that played. @@ -114,9 +114,14 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Playback control - public func start(atTime: CMTime, duration: CMTime? = nil) { - self.requestedStartTime = atTime - requestedDuration = duration + public func start(atTime: CMTime, duration: CMTime? = nil, isTrimming: Bool = false) { + if !isTrimming { + requestedStartTime = atTime + } else { + trimmedStartTime = atTime + trimmedDuration = duration + } + self.start() } @@ -175,16 +180,14 @@ public class MovieInput: ImageSource { } self.startTime = self.requestedStartTime - self.durationFromStart = self.requestedDuration - if let requestedStartTime = self.requestedStartTime { - if let requestedDuration = self.requestedDuration, requestedDuration.seconds > 0, CMTimeAdd(requestedStartTime, requestedDuration) <= asset.duration { - assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: requestedDuration) + if let startTime = self.requestedStartTime ?? self.trimmedStartTime { + if let trimmedDuration = self.trimmedDuration, trimmedDuration.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= asset.duration { + assetReader.timeRange = CMTimeRange(start: startTime, duration: trimmedDuration) } else { - assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) + assetReader.timeRange = CMTimeRange(start: startTime, duration: kCMTimePositiveInfinity) } } self.requestedStartTime = nil - self.requestedDuration = nil self.currentTime = nil self.actualStartTime = nil @@ -328,8 +331,8 @@ public class MovieInput: ImageSource { if let startTime = self.startTime { // Make sure our samples start at kCMTimeZero if the video was started midway. 
currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) - if let durationFromStart = self.durationFromStart, durationFromStart.seconds > 0, CMTimeAdd(startTime, durationFromStart) <= duration { - duration = durationFromStart + if let trimmedDuration = self.trimmedDuration, startTime.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= duration { + duration = trimmedDuration } else { duration = CMTimeSubtract(duration, startTime) } From 41cded9ef9c8fd2e897e5e79fc973d1996a00605 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 23 Feb 2019 23:50:01 +0800 Subject: [PATCH 113/332] fix(MoviePlayer): fix seeking and refactor --- framework/Source/iOS/MoviePlayer.swift | 71 +++++++++++++++----------- 1 file changed, 41 insertions(+), 30 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index a5e88b7f..0f03b7d4 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -104,28 +104,8 @@ public class MoviePlayer: ImageSource { public func start() { isPlaying = true debugPrint("movie player start \(asset)") - if displayLink == nil { - displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) - displayLink?.add(to: RunLoop.main, forMode: .commonModes) - } - timeObserversQueue.removeAll() - if let endTime = endTime { - let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in - if self?.loop == true { - self?.pause() - self?.start() - } else { - self?.pause() - } - } - timeObserversQueue.append(endTimeObserver) - } - for observer in totalTimeObservers { - guard observer.targetTime >= startTime ?? 0 else { - break - } - timeObserversQueue.append(observer) - } + _setupDisplayLinkIfNeeded() + _resetTimeObservers() seekToTime(startTime ?? 
0, shouldPlayAfterSeeking: true) } @@ -150,14 +130,24 @@ public class MoviePlayer: ImageSource { } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { - player.seek(to: CMTime(seconds: time, preferredTimescale: 600), toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero) { [weak self] success in + let seekingCompletion = { [weak self] (success: Bool) in print("movie player did seek to time:\(time) success:\(success)") guard let self = self else { return } if shouldPlayAfterSeeking { + self._resetTimeObservers() self.isPlaying = true self.player.rate = self.playrate } } + + let targetTime = CMTime(seconds: time, preferredTimescale: 600) + if shouldPlayAfterSeeking { + // 0.1s has 3 frames tolerance for 30 FPS video, it should be enough if there is no sticky video + let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) + player.seek(to: targetTime, toleranceBefore: toleranceTime, toleranceAfter: kCMTimeZero, completionHandler: seekingCompletion) + } else { + player.seek(to: targetTime, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero, completionHandler: seekingCompletion) + } } public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { @@ -192,11 +182,11 @@ public class MoviePlayer: ImageSource { } private extension MoviePlayer { - // MARK: - - // MARK: Thread configuration - - func nanosToAbs(_ nanos: UInt64) -> UInt64 { - return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer) + func _setupDisplayLinkIfNeeded() { + if displayLink == nil { + displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) + displayLink?.add(to: RunLoop.main, forMode: .commonModes) + } } func _setupObservers() { @@ -210,6 +200,27 @@ private extension MoviePlayer { NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) } + func _resetTimeObservers() { + timeObserversQueue.removeAll() + if let endTime = endTime 
{ + let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in + if self?.loop == true && self?.isPlaying == true { + self?.pause() + self?.start() + } else { + self?.pause() + } + } + timeObserversQueue.append(endTimeObserver) + } + for observer in totalTimeObservers { + guard observer.targetTime >= startTime ?? 0 else { + break + } + timeObserversQueue.append(observer) + } + } + func playerRateDidChange() { debugPrint("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") resumeIfNeeded() @@ -228,7 +239,7 @@ private extension MoviePlayer { // MARK: - // MARK: Internal processing functions - func process(movieFrame: CVPixelBuffer, with sampleTime: CMTime) { + func _process(movieFrame: CVPixelBuffer, with sampleTime: CMTime) { delegate?.moviePlayerDidReadPixelBuffer(movieFrame, time: CMTimeGetSeconds(sampleTime)) let bufferHeight = CVPixelBufferGetHeight(movieFrame) @@ -331,7 +342,7 @@ private extension MoviePlayer { return } self._notifyTimeObserver(with: currentTime) - self.process(movieFrame: pixelBuffer, with: currentTime) + self._process(movieFrame: pixelBuffer, with: currentTime) } } } From 303c79f14e6fbf69fddb039aa8ffe29be2bf4ad2 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 25 Feb 2019 11:23:12 +0800 Subject: [PATCH 114/332] fix MoviePlayer retain cycle --- framework/Source/iOS/MoviePlayer.swift | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 0f03b7d4..e2d3d6e4 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -124,7 +124,6 @@ public class MoviePlayer: ImageSource { pause() debugPrint("movie player stop \(asset)") timeObserversQueue.removeAll() - displayLink?.remove(from: RunLoop.current, forMode: .commonModes) displayLink?.invalidate() displayLink = nil } @@ -334,7 +333,11 @@ private extension MoviePlayer { } @objc func 
displayLinkCallback(displayLink: CADisplayLink) { - sharedImageProcessingContext.runOperationAsynchronously { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + guard let self = self else { + displayLink.invalidate() + return + } let currentTime = self.player.currentTime() if self.videoOutput.hasNewPixelBuffer(forItemTime: currentTime) { guard let pixelBuffer = self.videoOutput.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil) else { From 2fd2cf5aaaad3e282048b5e28ee0e3c937b6234c Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 2 Mar 2019 23:08:42 +0800 Subject: [PATCH 115/332] fix MoviePlayer seeking choppy --- framework/Source/iOS/MoviePlayer.swift | 48 +++++++++++++++++++------- 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index e2d3d6e4..183e75d2 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -64,6 +64,15 @@ public class MoviePlayer: ImageSource { var framebufferUserInfo: [AnyHashable:Any]? var observations = [NSKeyValueObservation]() + struct SeekingInfo { + let time: CMTime + let toleranceBefore: CMTime + let toleranceAfter: CMTime + let shouldPlayAfterSeeking: Bool + } + var nextSeeking: SeekingInfo? 
+ var isSeeking: Bool = false + public init(asset: AVAsset, loop: Bool = false) throws { debugPrint("movie player init \(asset)") self.asset = asset @@ -116,6 +125,7 @@ public class MoviePlayer: ImageSource { public func pause() { isPlaying = false + guard player.rate != 0 else { return } debugPrint("movie player pause \(asset)") player.pause() } @@ -129,23 +139,37 @@ public class MoviePlayer: ImageSource { } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { - let seekingCompletion = { [weak self] (success: Bool) in - print("movie player did seek to time:\(time) success:\(success)") - guard let self = self else { return } - if shouldPlayAfterSeeking { - self._resetTimeObservers() - self.isPlaying = true - self.player.rate = self.playrate - } - } - let targetTime = CMTime(seconds: time, preferredTimescale: 600) if shouldPlayAfterSeeking { // 0.1s has 3 frames tolerance for 30 FPS video, it should be enough if there is no sticky video let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) - player.seek(to: targetTime, toleranceBefore: toleranceTime, toleranceAfter: kCMTimeZero, completionHandler: seekingCompletion) + nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } else { - player.seek(to: targetTime, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero, completionHandler: seekingCompletion) + nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) + } + actuallySeekToTime() + } + + func actuallySeekToTime() { + // Avoid seeking choppy when fast seeking + // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 + guard !isSeeking, let seekingInfo = nextSeeking else { return } + isSeeking = true + player.seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, 
toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in + debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success)") + guard let self = self else { return } + if seekingInfo.shouldPlayAfterSeeking { + self._resetTimeObservers() + self.isPlaying = true + self.player.rate = self.playrate + } + + self.isSeeking = false + if seekingInfo.time != self.nextSeeking?.time { + self.actuallySeekToTime() + } else { + self.nextSeeking = nil + } } } From cb5034f2686d51dc357014e92fff1b3c8e1cce25 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 3 Mar 2019 16:46:37 +0800 Subject: [PATCH 116/332] fix MoviePlayer playAfterSeeking is not working --- framework/Source/iOS/MoviePlayer.swift | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 183e75d2..9e49bb82 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -64,11 +64,18 @@ public class MoviePlayer: ImageSource { var framebufferUserInfo: [AnyHashable:Any]? var observations = [NSKeyValueObservation]() - struct SeekingInfo { + struct SeekingInfo: Equatable { let time: CMTime let toleranceBefore: CMTime let toleranceAfter: CMTime let shouldPlayAfterSeeking: Bool + + public static func == (lhs: MoviePlayer.SeekingInfo, rhs: MoviePlayer.SeekingInfo) -> Bool { + return lhs.time.seconds == rhs.time.seconds + && lhs.toleranceBefore.seconds == rhs.toleranceBefore.seconds + && lhs.toleranceAfter.seconds == rhs.toleranceAfter.seconds + && lhs.shouldPlayAfterSeeking == rhs.shouldPlayAfterSeeking + } } var nextSeeking: SeekingInfo? 
var isSeeking: Bool = false @@ -121,6 +128,7 @@ public class MoviePlayer: ImageSource { public func resume() { isPlaying = true player.rate = playrate + debugPrint("movie player resume \(asset)") } public func pause() { @@ -156,7 +164,7 @@ public class MoviePlayer: ImageSource { guard !isSeeking, let seekingInfo = nextSeeking else { return } isSeeking = true player.seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in - debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success)") + debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") guard let self = self else { return } if seekingInfo.shouldPlayAfterSeeking { self._resetTimeObservers() @@ -165,7 +173,7 @@ public class MoviePlayer: ImageSource { } self.isSeeking = false - if seekingInfo.time != self.nextSeeking?.time { + if seekingInfo != self.nextSeeking { self.actuallySeekToTime() } else { self.nextSeeking = nil From 6a7dfd673fa7304d1446d295684a36bf51a9052e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 12 Mar 2019 19:13:15 +0800 Subject: [PATCH 117/332] MovieOutput add startTime output callback --- framework/Source/iOS/MovieOutput.swift | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 412bf249..24334da6 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -7,6 +7,10 @@ public protocol AudioEncodingTarget { func readyForNextAudioBuffer() -> Bool } +public protocol MovieOutputDelegate: class { + func movieOutputDidStartWriting(_ movieOutput: MovieOutput, at time: CMTime) +} + public enum MovieOutputError: Error, CustomStringConvertible { case startWritingError(assetWriterError: Error?) 
case pixelBufferPoolNilError @@ -30,6 +34,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let sources = SourceContainer() public let maximumInputs:UInt = 1 + public weak var delegate: MovieOutputDelegate? + let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? @@ -242,6 +248,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. self.assetWriter.startSession(atSourceTime: frameTime) + self.delegate?.movieOutputDidStartWriting(self, at: frameTime) } self.previousFrameTime = frameTime @@ -361,6 +368,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. self.assetWriter.startSession(atSourceTime: frameTime) + self.delegate?.movieOutputDidStartWriting(self, at: frameTime) } self.previousFrameTime = frameTime From bad17054b7ec8cb46c986d795557fd46f8bb950d Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 26 Mar 2019 17:33:49 +0800 Subject: [PATCH 118/332] chore: migration to swift 5.0 --- README.md | 4 +- framework/Source/Matrix.swift | 4 +- .../Operations/HarrisCornerDetector.swift | 2 +- framework/Source/Operations/Histogram.swift | 2 +- framework/Source/Pipeline.swift | 14 ++-- framework/Source/SerialDispatch.swift | 10 +-- framework/Source/iOS/Camera.swift | 69 ++++++++++--------- framework/Source/iOS/MovieInput.swift | 12 ++-- framework/Source/iOS/MovieOutput.swift | 16 ++--- framework/Source/iOS/MoviePlayer.swift | 6 +- framework/Source/iOS/OpenGLContext.swift | 2 +- framework/Source/iOS/PictureInput.swift | 2 +- framework/Source/iOS/PictureOutput.swift | 12 ++-- framework/Source/iOS/RenderView.swift | 2 +- framework/Source/iOS/SpeakerOutput.swift | 4 +- 15 files 
changed, 81 insertions(+), 80 deletions(-) diff --git a/README.md b/README.md index de4f9270..c231f7d7 100755 --- a/README.md +++ b/README.md @@ -226,8 +226,8 @@ let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! let asset = AVURLAsset(url:movieURL, options:[AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)]) -guard let videoTrack = asset.tracks(withMediaType:AVMediaType.video).first else { return } -let audioTrack = asset.tracks(withMediaType:AVMediaType.audio).first +guard let videoTrack = asset.tracks(withMediaType:.video).first else { return } +let audioTrack = asset.tracks(withMediaType:.audio).first // If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil let audioDecodingSettings:[String:Any] = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples diff --git a/framework/Source/Matrix.swift b/framework/Source/Matrix.swift index 475fff5b..ed563fdf 100644 --- a/framework/Source/Matrix.swift +++ b/framework/Source/Matrix.swift @@ -95,7 +95,7 @@ func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near #if !os(Linux) public extension Matrix4x4 { - public init (_ transform3D:CATransform3D) { + init (_ transform3D:CATransform3D) { self.m11 = Float(transform3D.m11) self.m12 = Float(transform3D.m12) self.m13 = Float(transform3D.m13) @@ -117,7 +117,7 @@ public extension Matrix4x4 { self.m44 = Float(transform3D.m44) } - public init (_ transform:CGAffineTransform) { + init (_ transform:CGAffineTransform) { self.init(CATransform3DMakeAffineTransform(transform)) } } diff --git a/framework/Source/Operations/HarrisCornerDetector.swift b/framework/Source/Operations/HarrisCornerDetector.swift index fa067b55..abb031cf 100644 --- a/framework/Source/Operations/HarrisCornerDetector.swift +++ b/framework/Source/Operations/HarrisCornerDetector.swift @@ -88,7 +88,7 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { currentByte += 4 } - 
rawImagePixels.deallocate(capacity:imageByteSize) + rawImagePixels.deallocate() // print("Harris extraction frame time: \(CFAbsoluteTimeGetCurrent() - startTime)") diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index 6fa49515..5e2c7e98 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -84,6 +84,6 @@ public class Histogram: BasicOperation { } disableBlending() - data.deallocate(capacity:inputByteSize) + data.deallocate() } } diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 911317e1..a9265970 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -31,7 +31,7 @@ infix operator --> : AdditionPrecedence // MARK: Extensions and supporting types public extension ImageSource { - public func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? = nil) { + func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? = nil) { if let targetIndex = atTargetIndex { target.setSource(self, atIndex:targetIndex) targets.append(target, indexAtTarget:targetIndex) @@ -48,14 +48,14 @@ public extension ImageSource { } } - public func removeAllTargets() { + func removeAllTargets() { for (target, index) in targets { target.removeSourceAtIndex(index) } targets.removeAll() } - public func remove(_ target:ImageConsumer) { + func remove(_ target:ImageConsumer) { for (testTarget, index) in targets { if(target === testTarget) { target.removeSourceAtIndex(index) @@ -64,7 +64,7 @@ public extension ImageSource { } } - public func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) { + func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) { var foundTargets = [(ImageConsumer, UInt)]() for target in targets { foundTargets.append(target) @@ -86,15 +86,15 @@ public extension ImageSource { } public extension ImageConsumer { - public func addSource(_ source:ImageSource) -> UInt? 
{ + func addSource(_ source:ImageSource) -> UInt? { return sources.append(source, maximumInputs:maximumInputs) } - public func setSource(_ source:ImageSource, atIndex:UInt) { + func setSource(_ source:ImageSource, atIndex:UInt) { _ = sources.insert(source, atIndex:atIndex, maximumInputs:maximumInputs) } - public func removeSourceAtIndex(_ index:UInt) { + func removeSourceAtIndex(_ index:UInt) { sources.removeAtIndex(index) } } diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index f87aec8c..6c3c5ceb 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -70,14 +70,14 @@ public protocol SerialDispatch { } public extension SerialDispatch { - public func runOperationAsynchronously(_ operation:@escaping () -> ()) { + func runOperationAsynchronously(_ operation:@escaping () -> ()) { self.serialDispatchQueue.async { self.makeCurrentContext() operation() } } - public func runOperationSynchronously(_ operation:() -> ()) { + func runOperationSynchronously(_ operation:() -> ()) { // TODO: Verify this works as intended if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == self.dispatchQueueKeyValue) { operation() @@ -89,7 +89,7 @@ public extension SerialDispatch { } } - public func runOperationSynchronously(_ operation:() throws -> ()) throws { + func runOperationSynchronously(_ operation:() throws -> ()) throws { var caughtError:Error? = nil runOperationSynchronously { do { @@ -101,7 +101,7 @@ public extension SerialDispatch { if (caughtError != nil) {throw caughtError!} } - public func runOperationSynchronously(_ operation:() throws -> T) throws -> T { + func runOperationSynchronously(_ operation:() throws -> T) throws -> T { var returnedValue: T! 
try runOperationSynchronously { returnedValue = try operation() @@ -109,7 +109,7 @@ public extension SerialDispatch { return returnedValue } - public func runOperationSynchronously(_ operation:() -> T) -> T { + func runOperationSynchronously(_ operation:() -> T) -> T { var returnedValue: T! runOperationSynchronously { returnedValue = operation() diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 796bdf94..d4a9b964 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -28,7 +28,7 @@ public enum PhysicalCameraLocation { } } - func captureDevicePosition() -> AVCaptureDevicePosition { + func captureDevicePosition() -> AVCaptureDevice.Position { switch self { case .backFacing: return .back case .frontFacing: return .front @@ -37,14 +37,14 @@ public enum PhysicalCameraLocation { } public func device() -> AVCaptureDevice? { - let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo) - for case let device as AVCaptureDevice in devices! { + let devices = AVCaptureDevice.devices(for: .video) + for device in devices { if (device.position == self.captureDevicePosition()) { return device } } - return AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeVideo) + return AVCaptureDevice.default(for: .video) } } @@ -141,7 +141,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var captureSessionRestartAttempts = 0 - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil) throws { + public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? 
= nil) throws { debugPrint("camera init") @@ -185,23 +185,23 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureAsYUV { supportsFullYUVRange = false - let supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes - for currentPixelFormat in supportedPixelFormats! { - if ((currentPixelFormat as! NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { + let supportedPixelFormats = videoOutput.availableVideoPixelFormatTypes + for currentPixelFormat in supportedPixelFormats { + if currentPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange { supportsFullYUVRange = true } } if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] as? [String : Any] } else { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] as? 
[String : Any] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] as? [String : Any] } if (captureSession.canAddOutput(videoOutput)) { @@ -221,7 +221,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.addOutput(captureMetadataOutput) captureMetadataOutput.setMetadataObjectsDelegate(metadataDelegate, queue: cameraProcessingQueue) - captureMetadataOutput.metadataObjectTypes = [AVMetadataObjectTypeQRCode] + captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr] } } @@ -275,7 +275,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - func captureSessionRuntimeError(note: NSNotification) { + @objc func captureSessionRuntimeError(note: NSNotification) { print("ERROR: Capture session runtime error: \(String(describing: note.userInfo))") if(self.captureSessionRestartAttempts < 1) { DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { @@ -285,7 +285,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - func captureSessionDidStartRunning(note: NSNotification) { + @objc func captureSessionDidStartRunning(note: NSNotification) { self.captureSessionRestartAttempts = 0 } @@ -423,16 +423,19 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer defer { captureSession.commitConfiguration() } - microphone = AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeAudio) + microphone = AVCaptureDevice.default(for: .audio) + guard let microphone = microphone else { return } audioInput = try AVCaptureDeviceInput(device:microphone) + guard let audioInput = audioInput else { return } if captureSession.canAddInput(audioInput) { captureSession.addInput(audioInput) } - 
audioOutput = AVCaptureAudioDataOutput() - if captureSession.canAddOutput(audioOutput) { - captureSession.addOutput(audioOutput) + let output = AVCaptureAudioDataOutput() + if captureSession.canAddOutput(output) { + captureSession.addOutput(output) } - audioOutput?.setSampleBufferDelegate(self, queue:audioProcessingQueue) + output.setSampleBufferDelegate(self, queue:audioProcessingQueue) + audioOutput = output } public func removeAudioInputsAndOutputs() { @@ -454,22 +457,20 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer private extension Camera { static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput, stableMode: AVCaptureVideoStabilizationMode = .standard) { - if let connections = videoOutput.connections as? [AVCaptureConnection] { - for connection in connections { - if connection.isVideoMirroringSupported { - connection.isVideoMirrored = (location == .frontFacingMirrored) - } - - if connection.isVideoOrientationSupported { - connection.videoOrientation = .portrait - } - - if connection.isVideoStabilizationSupported { - connection.preferredVideoStabilizationMode = stableMode - } - - print("isVideoStabilizationSupported: \(connection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(connection.activeVideoStabilizationMode.rawValue)") + for connection in videoOutput.connections { + if connection.isVideoMirroringSupported { + connection.isVideoMirrored = (location == .frontFacingMirrored) } + + if connection.isVideoOrientationSupported { + connection.videoOrientation = .portrait + } + + if connection.isVideoStabilizationSupported { + connection.preferredVideoStabilizationMode = stableMode + } + + print("isVideoStabilizationSupported: \(connection.isVideoStabilizationSupported), activeVideoStabilizationMode: \(connection.activeVideoStabilizationMode.rawValue)") } } } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 530d5630..64655611 
100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -161,18 +161,18 @@ public class MovieInput: ImageSource { let assetReader = try AVAssetReader.init(asset: self.asset) if(self.videoComposition == nil) { - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo).first!, outputSettings:outputSettings) + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings:outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerVideoTrackOutput) } else { - let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: AVMediaTypeVideo), videoSettings: outputSettings) + let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: .video), videoSettings: outputSettings) readerVideoTrackOutput.videoComposition = self.videoComposition readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerVideoTrackOutput) } - if let audioTrack = self.asset.tracks(withMediaType: AVMediaTypeAudio).first, + if let audioTrack = self.asset.tracks(withMediaType: .audio).first, let _ = self.audioEncodingTarget { let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings) readerAudioTrackOutput.alwaysCopiesSampleData = false @@ -184,7 +184,7 @@ public class MovieInput: ImageSource { if let trimmedDuration = self.trimmedDuration, trimmedDuration.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= asset.duration { assetReader.timeRange = CMTimeRange(start: startTime, duration: trimmedDuration) } else { - assetReader.timeRange = CMTimeRange(start: startTime, duration: kCMTimePositiveInfinity) + assetReader.timeRange = CMTimeRange(start: startTime, duration: .positiveInfinity) } } self.requestedStartTime = nil @@ -244,10 +244,10 @@ public class 
MovieInput: ImageSource { var readerAudioTrackOutput:AVAssetReaderOutput? = nil for output in assetReader.outputs { - if(output.mediaType == AVMediaTypeVideo) { + if(output.mediaType == .video) { readerVideoTrackOutput = output } - if(output.mediaType == AVMediaTypeAudio) { + if(output.mediaType == .audio) { readerAudioTrackOutput = output } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 24334da6..af4f0b12 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -71,7 +71,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { debugPrint("movie output deinit \(assetWriter.outputURL)") } - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil, keepLastPixelBuffer: Bool = false) throws { + public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil, keepLastPixelBuffer: Bool = false) throws { debugPrint("movie output init \(URL)") @@ -99,7 +99,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? size.height localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 - assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) + assetWriterVideoInput = AVAssetWriterInput(mediaType:.video, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo // You should provide a naturalTimeScale if you have one for the current media. 
@@ -108,10 +108,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriter.movieTimeScale = naturalTimeScale assetWriterVideoInput.mediaTimeScale = naturalTimeScale // This is set to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. - assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, naturalTimeScale) + assetWriter.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: naturalTimeScale) } else { - assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, 1000) + assetWriter.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: 1000) } encodingLiveVideo = liveVideo @@ -416,7 +416,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Audio support public func activateAudioTrack() { - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) + assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) assetWriter.add(assetWriterAudioInput!) 
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo @@ -500,16 +500,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public extension Timestamp { - public init(_ time:CMTime) { + init(_ time:CMTime) { self.value = time.value self.timescale = time.timescale self.flags = TimestampFlags(rawValue:time.flags.rawValue) self.epoch = time.epoch } - public var asCMTime:CMTime { + var asCMTime:CMTime { get { - return CMTimeMakeWithEpoch(value, timescale, epoch) + return CMTimeMakeWithEpoch(value: value, timescale: timescale, epoch: epoch) } } } diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 9e49bb82..8fed0cc4 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -95,7 +95,7 @@ public class MoviePlayer: ImageSource { playerItem = AVPlayerItem(asset: asset) playerItem.add(videoOutput) - playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed + playerItem.audioTimePitchAlgorithm = .varispeed player = AVPlayer(playerItem: playerItem) _setupObservers() } @@ -153,7 +153,7 @@ public class MoviePlayer: ImageSource { let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } else { - nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: kCMTimeZero, toleranceAfter: kCMTimeZero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) + nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } actuallySeekToTime() } @@ -216,7 +216,7 @@ private extension MoviePlayer { func _setupDisplayLinkIfNeeded() { if displayLink == nil { displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) - displayLink?.add(to: RunLoop.main, forMode: .commonModes) + displayLink?.add(to: 
RunLoop.main, forMode: .common) } } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index e490cbeb..b99d08e5 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -79,7 +79,7 @@ public class OpenGLContext: SerialDispatch { // MARK: Device capabilities func supportsTextureCaches() -> Bool { -#if (arch(i386) || arch(x86_64)) && os(iOS) +#if targetEnvironment(simulator) return false // Simulator glitches out on use of texture caches #else return true // Every iOS version and device that can run Swift can handle texture caches diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 71db1e83..909581fc 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -131,7 +131,7 @@ public class PictureInput: ImageSource { } if (shouldRedrawUsingCoreGraphics) { - imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData.deallocate() } } diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 5c73a6df..eacf321b 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -94,8 +94,8 @@ public class PictureOutput: ImageConsumer { let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) let imageData:Data switch encodedImageFormat { - case .png: imageData = UIImagePNGRepresentation(image)! // TODO: Better error handling here - case .jpeg: imageData = UIImageJPEGRepresentation(image, encodedJPEGImageCompressionQuality)! + case .png: imageData = image.pngData()! // TODO: Better error handling here + case .jpeg: imageData = image.jpegData(compressionQuality: encodedJPEGImageCompressionQuality)! 
} imageCallback(imageData) @@ -120,7 +120,7 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { let pictureOutput = PictureOutput() pictureOutput.saveNextFrameToURL(url, format:format) self --> pictureOutput @@ -128,13 +128,13 @@ public extension ImageSource { } public extension UIImage { - public func filterWithOperation(_ operation:T) throws -> UIImage { + func filterWithOperation(_ operation:T) throws -> UIImage { return try filterWithPipeline{input, output in input --> operation --> output } } - public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) throws -> UIImage { + func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) throws -> UIImage { let picture = try PictureInput(image:self) var outputImage:UIImage? let pictureOutput = PictureOutput() @@ -150,5 +150,5 @@ public extension UIImage { // Why are these flipped in the callback definition? func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) { - data.deallocate(bytes:size, alignedTo:1) + data.deallocate() } diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 5606dd92..c6f442b3 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -65,7 +65,7 @@ public class RenderView:UIView, ImageConsumer { let eaglLayer = self.layer as! 
CAEAGLLayer eaglLayer.isOpaque = true eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value:false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] - eaglLayer.contentsGravity = kCAGravityResizeAspectFill // Just for safety to prevent distortion + eaglLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill // Just for safety to prevent distortion self.internalLayer = eaglLayer } diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index 499d4763..caa1ebc4 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -96,7 +96,7 @@ public class SpeakerOutput: AudioEncodingTarget { public func activateAudioTrack() { if(changesAudioSession) { do { - try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient) + try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.ambient) try AVAudioSession.sharedInstance().setActive(true) } catch { @@ -217,7 +217,7 @@ public class SpeakerOutput: AudioEncodingTarget { // Populate an AudioBufferList with the sample var audioBufferList = AudioBufferList() var blockBuffer:CMBlockBuffer? 
- CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, nil, &audioBufferList, MemoryLayout.size, nil, nil, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer) + CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, bufferListSizeNeededOut: nil, bufferListOut: &audioBufferList, bufferListSize: MemoryLayout.size, blockBufferAllocator: nil, blockBufferMemoryAllocator: nil, flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, blockBufferOut: &blockBuffer) // This is actually doing audioBufferList.mBuffers[0] // Since the struct has an array of length of 1 the compiler is interpreting From 641c551092d773c7711ee9af3767b4de8d0fa0fc Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 27 Mar 2019 12:43:46 +0800 Subject: [PATCH 119/332] fix(camera): fix sampleBuffer delegate and upgrade project settings --- .../SimpleVideoRecorder.xcodeproj/project.pbxproj | 8 ++++---- .../SimpleVideoRecorder/ViewController.swift | 8 ++++---- framework/GPUImage.xcodeproj/project.pbxproj | 8 ++++---- framework/Source/Mac/Camera.swift | 4 ++-- framework/Source/iOS/Camera.swift | 9 +++++---- 5 files changed, 19 insertions(+), 18 deletions(-) diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj index c977e0c6..90e3466c 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj @@ -322,7 +322,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -362,7 +362,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - 
IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; @@ -379,7 +379,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; }; name = Debug; }; @@ -392,7 +392,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; }; name = Release; }; diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index ad950f26..5b3c3c69 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -13,7 +13,7 @@ class ViewController: UIViewController { super.viewDidLoad() do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset:AVCaptureSession.Preset.vga640x480) camera.runBenchmark = true filter = SaturationAdjustment() camera --> filter --> renderView @@ -45,15 +45,15 @@ class ViewController: UIViewController { fatalError("ERROR: Could not connect audio target with error: \(error)") } - let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(withOutputFileType:AVFileTypeMPEG4) as? [String : Any] + let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(writingTo:AVFileType.mp4) as? [String : Any] var videoSettings:[String : Any]? = nil if #available(iOS 11.0, *) { - videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType:.h264, assetWriterOutputFileType:AVFileTypeMPEG4) as? 
[String : Any] + videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType:.h264, assetWriterOutputFileType:AVFileType.mp4) as? [String : Any] videoSettings![AVVideoWidthKey] = nil videoSettings![AVVideoHeightKey] = nil } - movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), fileType:AVFileTypeMPEG4, liveVideo:true, videoSettings:videoSettings, audioSettings:audioSettings) + movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), fileType:AVFileType.mp4, liveVideo:true, videoSettings:videoSettings, audioSettings:audioSettings) camera.audioEncodingTarget = movieOutput filter --> movieOutput! movieOutput!.startRecording() { started, error in diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 8bd09e64..9c786b21 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -1822,7 +1822,7 @@ GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; INSTALL_PATH = /Library/Frameworks; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MACOSX_DEPLOYMENT_TARGET = 10.9; ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; @@ -1867,7 +1867,7 @@ GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; INSTALL_PATH = /Library/Frameworks; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; MACOSX_DEPLOYMENT_TARGET = 10.9; SDKROOT = macosx; SKIP_INSTALL = YES; @@ -1973,7 +1973,7 @@ SKIP_INSTALL = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; @@ -2003,7 +2003,7 @@ SDKROOT = iphoneos; SKIP_INSTALL = YES; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; diff --git a/framework/Source/Mac/Camera.swift 
b/framework/Source/Mac/Camera.swift index b8f02ae6..accf294a 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -107,8 +107,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) } } - - public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) { + + public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { guard (captureOutput != audioOutput) else { self.processAudioSampleBuffer(sampleBuffer) return diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index d4a9b964..8de994f7 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -150,6 +150,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.captureSession = AVCaptureSession() self.captureSession.beginConfiguration() + captureSession.sessionPreset = sessionPreset if let cameraDevice = cameraDevice { self.inputCamera = cameraDevice @@ -194,14 +195,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] as? 
[String : Any] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] } else { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] as? [String : Any] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] as? [String : Any] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA] } if (captureSession.canAddOutput(videoOutput)) { @@ -289,7 +290,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.captureSessionRestartAttempts = 0 } - public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) 
{ + public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { guard (captureOutput != audioOutput) else { self.processAudioSampleBuffer(sampleBuffer) return From 94e78e036354cdd861feb94aeadd630b558e1952 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 1 Apr 2019 10:56:46 +0800 Subject: [PATCH 120/332] make MovieOutput.startRecording sync-able --- framework/Source/iOS/MovieOutput.swift | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index af4f0b12..286a312a 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -131,11 +131,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.keepLastPixelBuffer = keepLastPixelBuffer } - public func startRecording(_ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { + public func startRecording(sync: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. - DispatchQueue.global(qos: .utility).async { + let block = { () -> Void in do { var success = false try NSObject.catchException { @@ -148,16 +148,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard self.assetWriterPixelBufferInput.pixelBufferPool != nil else { /* - When the pixelBufferPool returns nil, check the following: - 1. the the output file of the AVAssetsWriter doesn't exist. - 2. use the pixelbuffer after calling startSessionAtTime: on the AVAssetsWriter. - 3. 
the settings of AVAssetWriterInput and AVAssetWriterInputPixelBufferAdaptor are correct. - 4. the present times of appendPixelBuffer uses are not the same. - https://stackoverflow.com/a/20110179/1275014 - */ + When the pixelBufferPool returns nil, check the following: + 1. the the output file of the AVAssetsWriter doesn't exist. + 2. use the pixelbuffer after calling startSessionAtTime: on the AVAssetsWriter. + 3. the settings of AVAssetWriterInput and AVAssetWriterInputPixelBufferAdaptor are correct. + 4. the present times of appendPixelBuffer uses are not the same. + https://stackoverflow.com/a/20110179/1275014 + */ throw MovieOutputError.pixelBufferPoolNilError } - + self.isRecording = true self.synchronizedEncodingDebugPrint("MovieOutput started writing") @@ -169,6 +169,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { completionCallback?(false, error) } } + + if sync { + block() + } else { + DispatchQueue.global(qos: .userInitiated).async(execute: block) + } } public func finishRecording(_ completionCallback:(() -> Void)? = nil) { From de6614edb92c30b080b72b16498f213b45bdba25 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 2 Apr 2019 18:20:11 +0800 Subject: [PATCH 121/332] fix MoviePlayer time observers are not triggered correctly --- framework/Source/iOS/MoviePlayer.swift | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 8fed0cc4..4781458b 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -233,6 +233,12 @@ private extension MoviePlayer { func _resetTimeObservers() { timeObserversQueue.removeAll() + for observer in totalTimeObservers { + guard observer.targetTime >= startTime ?? 0 && observer.targetTime <= endTime ?? 
asset.duration.seconds else { + continue + } + timeObserversQueue.append(observer) + } if let endTime = endTime { let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in if self?.loop == true && self?.isPlaying == true { @@ -242,13 +248,8 @@ private extension MoviePlayer { self?.pause() } } - timeObserversQueue.append(endTimeObserver) - } - for observer in totalTimeObservers { - guard observer.targetTime >= startTime ?? 0 else { - break - } - timeObserversQueue.append(observer) + let insertIndex: Int = timeObserversQueue.reversed().firstIndex { endTime < $0.targetTime } ?? 0 + timeObserversQueue.insert(endTimeObserver, at: insertIndex) } } From 415e233d3e68341e0f63d9c353b2f0d9ea14a204 Mon Sep 17 00:00:00 2001 From: sunday37zhiyi Date: Wed, 3 Apr 2019 12:11:36 +0800 Subject: [PATCH 122/332] improve(player): expose player volume as public --- framework/Source/iOS/MoviePlayer.swift | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 4781458b..a4cf20f0 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -59,6 +59,11 @@ public class MoviePlayer: ImageSource { player.isMuted = isMuted } } + public var volume: Float { + get { return player.volume } + set { player.volume = newValue } + } + var movieFramebuffer: Framebuffer? var framebufferUserInfo: [AnyHashable:Any]? 
From d6890c69b42c75808372ae371456285dad43b756 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 11 Apr 2019 18:29:10 +0800 Subject: [PATCH 123/332] guard player status is readyToPlay and remove some debug log --- framework/Source/iOS/MoviePlayer.swift | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index a4cf20f0..1f08bc2b 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -166,10 +166,10 @@ public class MoviePlayer: ImageSource { func actuallySeekToTime() { // Avoid seeking choppy when fast seeking // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 - guard !isSeeking, let seekingInfo = nextSeeking else { return } + guard !isSeeking, let seekingInfo = nextSeeking, player.status == .readyToPlay else { return } isSeeking = true player.seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in - debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") +// debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") guard let self = self else { return } if seekingInfo.shouldPlayAfterSeeking { self._resetTimeObservers() @@ -259,7 +259,7 @@ private extension MoviePlayer { } func playerRateDidChange() { - debugPrint("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") +// debugPrint("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") resumeIfNeeded() } From a91f735366f08e0508d226d784f31109bd33cd02 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 13 Apr 2019 18:16:55 +0800 Subject: [PATCH 124/332] MoviePlayer support reusable with new playerItem --- 
framework/Source/iOS/MoviePlayer.swift | 144 +++++++++++++++++-------- 1 file changed, 97 insertions(+), 47 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 1f08bc2b..9f81de79 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -29,16 +29,15 @@ public class MoviePlayer: ImageSource { public var runBenchmark = false public var logEnabled = false public weak var delegate: MoviePlayerDelegate? - public let asset: AVAsset public let player: AVPlayer - public var isPlaying = false - public var startTime: TimeInterval? public var endTime: TimeInterval? - public var loop: Bool + public var loop: Bool = false + public private(set) var asset: AVAsset? + public private(set) var isPlaying = false - let playerItem: AVPlayerItem - let videoOutput: AVPlayerItemVideoOutput + private(set) var playerItem: AVPlayerItem? + var videoOutput: AVPlayerItemVideoOutput? var displayLink: CADisplayLink? let yuvConversionShader: ShaderProgram @@ -49,11 +48,7 @@ public class MoviePlayer: ImageSource { var timebaseInfo = mach_timebase_info_data_t() var totalFramesSent = 0 var totalFrameTime: Double = 0.0 - public var playrate: Float = 1.0 { - didSet { - player.rate = playrate - } - } + public var playrate: Float = 1.0 public var isMuted: Bool = false { didSet { player.isMuted = isMuted @@ -63,7 +58,12 @@ public class MoviePlayer: ImageSource { get { return player.volume } set { player.volume = newValue } } - + public var assetDuration: TimeInterval { + return asset?.duration.seconds ?? 0 + } + public var isReadyToPlay: Bool { + return player.status == .readyToPlay + } var movieFramebuffer: Framebuffer? var framebufferUserInfo: [AnyHashable:Any]? @@ -85,46 +85,67 @@ public class MoviePlayer: ImageSource { var nextSeeking: SeekingInfo? 
var isSeeking: Bool = false - public init(asset: AVAsset, loop: Bool = false) throws { - debugPrint("movie player init \(asset)") - self.asset = asset - self.loop = loop + public init() { + debugPrint("movie player init") self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } - - let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] - videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) - videoOutput.suppressesPlayerRendering = true - - playerItem = AVPlayerItem(asset: asset) - playerItem.add(videoOutput) - playerItem.audioTimePitchAlgorithm = .varispeed - player = AVPlayer(playerItem: playerItem) - _setupObservers() - } - - public convenience init(url: URL, loop: Bool = false) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] - let inputAsset = AVURLAsset(url: url, options: inputOptions) - try self.init(asset: inputAsset, loop: loop) + // Make sure player it intialized on the main thread, or it might cause KVO crash + assert(Thread.isMainThread) + player = AVQueuePlayer(playerItem: nil) + NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) } deinit { - debugPrint("movie player deinit \(asset)") + debugPrint("movie player deinit \(String(describing: asset))") stop() movieFramebuffer?.unlock() observations.forEach { $0.invalidate() } + observations.removeAll() NotificationCenter.default.removeObserver(self) } + // MARK: Data Source + public func setupPlayer(url: URL) { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let 
inputAsset = AVURLAsset(url: url, options: inputOptions) + setupPlayer(asset: inputAsset) + } + + public func setupPlayer(asset: AVAsset) { + if isPlaying { + stop() + } + + self.videoOutput.map { self.playerItem?.remove($0) } + + let playerItem = AVPlayerItem(asset: asset) + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + playerItem.add(videoOutput) + playerItem.audioTimePitchAlgorithm = .varispeed + + self.asset = asset + self.playerItem = playerItem + self.videoOutput = videoOutput + player.replaceCurrentItem(with: playerItem) + _setupPlayerObservers() + } + // MARK: - // MARK: Playback control public func start() { + guard playerItem != nil else { + assert(playerItem != nil) + debugPrint("ERROR! player hasn't been setup before starting") + return + } isPlaying = true - debugPrint("movie player start \(asset)") + debugPrint("movie player start \(String(describing: asset))") _setupDisplayLinkIfNeeded() _resetTimeObservers() seekToTime(startTime ?? 
0, shouldPlayAfterSeeking: true) @@ -133,22 +154,24 @@ public class MoviePlayer: ImageSource { public func resume() { isPlaying = true player.rate = playrate - debugPrint("movie player resume \(asset)") + debugPrint("movie player resume \(String(describing: asset))") } public func pause() { isPlaying = false guard player.rate != 0 else { return } - debugPrint("movie player pause \(asset)") + debugPrint("movie player pause \(String(describing: asset))") player.pause() } public func stop() { pause() - debugPrint("movie player stop \(asset)") + debugPrint("movie player stop \(String(describing: asset))") timeObserversQueue.removeAll() displayLink?.invalidate() displayLink = nil + isSeeking = false + nextSeeking = nil } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { @@ -166,7 +189,7 @@ public class MoviePlayer: ImageSource { func actuallySeekToTime() { // Avoid seeking choppy when fast seeking // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 - guard !isSeeking, let seekingInfo = nextSeeking, player.status == .readyToPlay else { return } + guard !isSeeking, let seekingInfo = nextSeeking, isReadyToPlay else { return } isSeeking = true player.seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in // debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") @@ -178,6 +201,7 @@ public class MoviePlayer: ImageSource { } self.isSeeking = false + if seekingInfo != self.nextSeeking { self.actuallySeekToTime() } else { @@ -225,21 +249,34 @@ private extension MoviePlayer { } } - func _setupObservers() { + func _setupPlayerObservers() { + _removePlayerObservers() observations.append(player.observe(\AVPlayer.rate) { [weak self] _, _ in self?.playerRateDidChange() }) + observations.append(player.observe(\AVPlayer.currentItem) 
{ [weak self] _, _ in + self?.playerCurrentItemDidChange() + }) observations.append(player.observe(\AVPlayer.status) { [weak self] _, _ in self?.playerStatusDidChange() }) - NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) - NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + + if let playerItem = player.currentItem { + observations.append(playerItem.observe(\AVPlayerItem.status) { [weak self] _, _ in + self?.playerItemStatusDidChange() + }) + } + } + + func _removePlayerObservers() { + observations.forEach { $0.invalidate() } + observations.removeAll() } func _resetTimeObservers() { timeObserversQueue.removeAll() for observer in totalTimeObservers { - guard observer.targetTime >= startTime ?? 0 && observer.targetTime <= endTime ?? asset.duration.seconds else { + guard observer.targetTime >= startTime ?? 0 && observer.targetTime <= endTime ?? 
assetDuration else { continue } timeObserversQueue.append(observer) @@ -263,13 +300,26 @@ private extension MoviePlayer { resumeIfNeeded() } + func playerCurrentItemDidChange() { + if player.currentItem == nil && isPlaying, let playerItem = playerItem { + player.replaceCurrentItem(with: playerItem) + start() + debugPrint("Warning: Player currentItem change to nil asset:\(String(describing: asset))") + } + } + func playerStatusDidChange() { - debugPrint("status change to:\(player.status.rawValue) asset:\(asset)") + debugPrint("Player status change to:\(player.status.rawValue) asset:\(String(describing: asset))") + resumeIfNeeded() + } + + func playerItemStatusDidChange() { + debugPrint("PlayerItem status change to:\(String(describing: player.currentItem?.status.rawValue)) asset:\(String(describing: asset))") resumeIfNeeded() } func resumeIfNeeded() { - guard player.status == .readyToPlay && isPlaying == true && player.rate != playrate else { return } + guard isReadyToPlay && isPlaying == true && player.rate != playrate else { return } player.rate = playrate } @@ -377,8 +427,8 @@ private extension MoviePlayer { return } let currentTime = self.player.currentTime() - if self.videoOutput.hasNewPixelBuffer(forItemTime: currentTime) { - guard let pixelBuffer = self.videoOutput.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil) else { + if self.videoOutput?.hasNewPixelBuffer(forItemTime: currentTime) == true { + guard let pixelBuffer = self.videoOutput?.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil) else { print("Failed to copy pixel buffer at time:\(currentTime)") return } @@ -389,7 +439,7 @@ private extension MoviePlayer { } @objc func playerDidPlayToEnd(notification: Notification) { - guard loop && isPlaying && (endTime == nil || player.currentTime() == playerItem.asset.duration) else { return } + guard loop && isPlaying && (endTime == nil || player.currentTime().seconds == assetDuration) else { return } start() } From 
461c65a5ccb30c95fcff40a905913d68323711e6 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 13 Apr 2019 22:18:37 +0800 Subject: [PATCH 125/332] fix MoviePlayer cannot stop --- framework/Source/iOS/MoviePlayer.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 9f81de79..1185246c 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -179,6 +179,7 @@ public class MoviePlayer: ImageSource { if shouldPlayAfterSeeking { // 0.1s has 3 frames tolerance for 30 FPS video, it should be enough if there is no sticky video let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) + isPlaying = true nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } else { nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) @@ -194,9 +195,8 @@ public class MoviePlayer: ImageSource { player.seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in // debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") guard let self = self else { return } - if seekingInfo.shouldPlayAfterSeeking { + if seekingInfo.shouldPlayAfterSeeking && self.isPlaying { self._resetTimeObservers() - self.isPlaying = true self.player.rate = self.playrate } From 7572f93da4b5f55517b31ffca1220afabfe6e205 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 15 Apr 2019 17:32:10 +0800 Subject: [PATCH 126/332] Change base class of MoviePlayer to AVPlayer --- framework/Source/iOS/MoviePlayer.swift | 130 ++++++++++++------------- 1 file changed, 64 insertions(+), 66 deletions(-) diff --git 
a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 1185246c..a154c039 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -24,12 +24,11 @@ public struct MoviePlayerTimeObserver { } } -public class MoviePlayer: ImageSource { +public class MoviePlayer: AVPlayer, ImageSource { public let targets = TargetContainer() public var runBenchmark = false public var logEnabled = false public weak var delegate: MoviePlayerDelegate? - public let player: AVPlayer public var startTime: TimeInterval? public var endTime: TimeInterval? public var loop: Bool = false @@ -49,20 +48,11 @@ public class MoviePlayer: ImageSource { var totalFramesSent = 0 var totalFrameTime: Double = 0.0 public var playrate: Float = 1.0 - public var isMuted: Bool = false { - didSet { - player.isMuted = isMuted - } - } - public var volume: Float { - get { return player.volume } - set { player.volume = newValue } - } public var assetDuration: TimeInterval { return asset?.duration.seconds ?? 0 } public var isReadyToPlay: Bool { - return player.status == .readyToPlay + return status == .readyToPlay } var movieFramebuffer: Framebuffer? @@ -85,7 +75,7 @@ public class MoviePlayer: ImageSource { var nextSeeking: SeekingInfo? 
var isSeeking: Bool = false - public init() { + public override init() { debugPrint("movie player init") self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), @@ -93,51 +83,67 @@ public class MoviePlayer: ImageSource { } // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) - player = AVQueuePlayer(playerItem: nil) - NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) - NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + super.init() + } + + override public init(playerItem item: AVPlayerItem?) { + self.playerItem = item + self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { + try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), + fragmentShader: YUVConversionFullRangeFragmentShader) + } + // Make sure player it intialized on the main thread, or it might cause KVO crash + assert(Thread.isMainThread) + super.init(playerItem: item) + replaceCurrentItem(with: item) } deinit { debugPrint("movie player deinit \(String(describing: asset))") stop() movieFramebuffer?.unlock() - observations.forEach { $0.invalidate() } - observations.removeAll() - NotificationCenter.default.removeObserver(self) + _removePlayerObservers() } // MARK: Data Source - public func setupPlayer(url: URL) { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] - let inputAsset = AVURLAsset(url: url, options: inputOptions) - setupPlayer(asset: inputAsset) + public func replaceCurrentItem(with url: URL) { + let inputAsset = AVURLAsset(url: url) + let playerItem = AVPlayerItem(asset: inputAsset, automaticallyLoadedAssetKeys: [AVURLAssetPreferPreciseDurationAndTimingKey]) + 
replaceCurrentItem(with: playerItem) } - public func setupPlayer(asset: AVAsset) { + override public func replaceCurrentItem(with item: AVPlayerItem?) { if isPlaying { stop() } - self.videoOutput.map { self.playerItem?.remove($0) } + self.playerItem = item + self.asset = item?.asset + if let item = item { + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + item.add(videoOutput) + item.audioTimePitchAlgorithm = .varispeed + self.videoOutput = videoOutput + _setupPlayerObservers() + } - let playerItem = AVPlayerItem(asset: asset) - let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] - let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) - videoOutput.suppressesPlayerRendering = true - playerItem.add(videoOutput) - playerItem.audioTimePitchAlgorithm = .varispeed - - self.asset = asset - self.playerItem = playerItem - self.videoOutput = videoOutput - player.replaceCurrentItem(with: playerItem) - _setupPlayerObservers() + super.replaceCurrentItem(with: item) } // MARK: - // MARK: Playback control + override public func play() { + start() + } + + override public func playImmediately(atRate rate: Float) { + playrate = rate + start() + } + public func start() { guard playerItem != nil else { assert(playerItem != nil) @@ -153,15 +159,15 @@ public class MoviePlayer: ImageSource { public func resume() { isPlaying = true - player.rate = playrate + rate = playrate debugPrint("movie player resume \(String(describing: asset))") } - public func pause() { + override public func pause() { isPlaying = false - guard player.rate != 0 else { return } + guard rate != 0 else { return } debugPrint("movie player pause \(String(describing: asset))") - player.pause() + super.pause() } public func stop() { @@ -192,12 +198,12 @@ 
public class MoviePlayer: ImageSource { // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 guard !isSeeking, let seekingInfo = nextSeeking, isReadyToPlay else { return } isSeeking = true - player.seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in + seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in // debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") guard let self = self else { return } if seekingInfo.shouldPlayAfterSeeking && self.isPlaying { self._resetTimeObservers() - self.player.rate = self.playrate + self.rate = self.playrate } self.isSeeking = false @@ -251,17 +257,15 @@ private extension MoviePlayer { func _setupPlayerObservers() { _removePlayerObservers() - observations.append(player.observe(\AVPlayer.rate) { [weak self] _, _ in - self?.playerRateDidChange() - }) - observations.append(player.observe(\AVPlayer.currentItem) { [weak self] _, _ in - self?.playerCurrentItemDidChange() - }) - observations.append(player.observe(\AVPlayer.status) { [weak self] _, _ in + NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + observations.append(observe(\.status) { [weak self] _, _ in self?.playerStatusDidChange() }) - - if let playerItem = player.currentItem { + observations.append(observe(\.rate) { [weak self] _, _ in + self?.playerRateDidChange() + }) + if let playerItem = playerItem { observations.append(playerItem.observe(\AVPlayerItem.status) { [weak self] _, _ in self?.playerItemStatusDidChange() }) @@ -269,6 +273,8 @@ 
private extension MoviePlayer { } func _removePlayerObservers() { + NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.removeObserver(self, name: .AVPlayerItemPlaybackStalled, object: nil) observations.forEach { $0.invalidate() } observations.removeAll() } @@ -300,27 +306,19 @@ private extension MoviePlayer { resumeIfNeeded() } - func playerCurrentItemDidChange() { - if player.currentItem == nil && isPlaying, let playerItem = playerItem { - player.replaceCurrentItem(with: playerItem) - start() - debugPrint("Warning: Player currentItem change to nil asset:\(String(describing: asset))") - } - } - func playerStatusDidChange() { - debugPrint("Player status change to:\(player.status.rawValue) asset:\(String(describing: asset))") + debugPrint("Player status change to:\(status.rawValue) asset:\(String(describing: asset))") resumeIfNeeded() } func playerItemStatusDidChange() { - debugPrint("PlayerItem status change to:\(String(describing: player.currentItem?.status.rawValue)) asset:\(String(describing: asset))") + debugPrint("PlayerItem status change to:\(String(describing: currentItem?.status.rawValue)) asset:\(String(describing: asset))") resumeIfNeeded() } func resumeIfNeeded() { - guard isReadyToPlay && isPlaying == true && player.rate != playrate else { return } - player.rate = playrate + guard isReadyToPlay && isPlaying == true && rate != playrate else { return } + rate = playrate } // MARK: - @@ -426,7 +424,7 @@ private extension MoviePlayer { displayLink.invalidate() return } - let currentTime = self.player.currentTime() + let currentTime = self.currentTime() if self.videoOutput?.hasNewPixelBuffer(forItemTime: currentTime) == true { guard let pixelBuffer = self.videoOutput?.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil) else { print("Failed to copy pixel buffer at time:\(currentTime)") @@ -439,7 +437,7 @@ private extension MoviePlayer { } @objc func 
playerDidPlayToEnd(notification: Notification) { - guard loop && isPlaying && (endTime == nil || player.currentTime().seconds == assetDuration) else { return } + guard loop && isPlaying && (endTime == nil || currentTime().seconds == assetDuration) else { return } start() } From 5a2869e683118a42a16492664b5ef2f215f8009c Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 18 Apr 2019 13:37:58 +0800 Subject: [PATCH 127/332] Clean up CoreVideoTexture cache after rendering every frame to save memory --- framework/Source/iOS/Camera.swift | 5 +++++ framework/Source/iOS/MoviePlayer.swift | 3 +++ 2 files changed, 8 insertions(+) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 8de994f7..49623349 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -369,6 +369,11 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) + // Clean up after all done + if self.captureAsYUV && sharedImageProcessingContext.supportsTextureCaches() { + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + } + if self.runBenchmark { self.numberOfFramesCaptured += 1 if (self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore) { diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index a154c039..1fc99bc8 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -406,6 +406,9 @@ private extension MoviePlayer { updateTargetsWithFramebuffer(framebuffer) + // Clean up + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + if(runBenchmark || logEnabled) { totalFramesSent += 1 } From da5cd15e7f06518fffcde6dc82b644308bbd3205 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 21 Apr 2019 12:53:53 +0800 Subject: [PATCH 128/332] fix 
MovieOutput crash when activateAudioTrack --- framework/Source/iOS/Camera.swift | 2 +- framework/Source/iOS/MovieInput.swift | 6 +++++- framework/Source/iOS/MovieOutput.swift | 10 ++++++++-- framework/Source/iOS/SpeakerOutput.swift | 2 +- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 49623349..e17cd283 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -92,7 +92,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } do { try self.addAudioInputsAndOutputs() - audioEncodingTarget.activateAudioTrack() + try audioEncodingTarget.activateAudioTrack() } catch { print("ERROR: Could not connect audio target with error: \(error)") } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 64655611..a5be3163 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -20,7 +20,11 @@ public class MovieInput: ImageSource { guard let audioEncodingTarget = audioEncodingTarget else { return } - audioEncodingTarget.activateAudioTrack() + do { + try audioEncodingTarget.activateAudioTrack() + } catch { + print("ERROR: Could not connect audio target with error: \(error)") + } // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. 
if(synchronizedMovieOutput != nil) { self.enableSynchronizedEncoding() } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 286a312a..95f0e17a 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,7 +1,7 @@ import AVFoundation public protocol AudioEncodingTarget { - func activateAudioTrack() + func activateAudioTrack() throws func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) // Note: This is not used for synchronized encoding. func readyForNextAudioBuffer() -> Bool @@ -14,6 +14,7 @@ public protocol MovieOutputDelegate: class { public enum MovieOutputError: Error, CustomStringConvertible { case startWritingError(assetWriterError: Error?) case pixelBufferPoolNilError + case activeAudioTrackError public var errorDescription: String { switch self { @@ -21,6 +22,8 @@ public enum MovieOutputError: Error, CustomStringConvertible { return "Could not start asset writer: \(String(describing: assetWriterError))" case .pixelBufferPoolNilError: return "Asset writer pixel buffer pool was nil. Make sure that your output file doesn't already exist." + case .activeAudioTrackError: + return "cannot active audio track when assetWriter status is not 0" } } @@ -421,7 +424,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: - // MARK: Audio support - public func activateAudioTrack() { + public func activateAudioTrack() throws { + guard assetWriter.status == .unknown else { + throw MovieOutputError.activeAudioTrackError + } assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) assetWriter.add(assetWriterAudioInput!) 
diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index caa1ebc4..17e9285d 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -93,7 +93,7 @@ public class SpeakerOutput: AudioEncodingTarget { // MARK: - // MARK: AudioEncodingTarget protocol - public func activateAudioTrack() { + public func activateAudioTrack() throws { if(changesAudioSession) { do { try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.ambient) From d53d4f279fa503b9a588e2a55f8b45757d0c3a79 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 15 Apr 2019 17:01:21 +0800 Subject: [PATCH 129/332] Update config to compile FilterShowcase --- .../FilterShowcase.xcodeproj/project.pbxproj | 12 ++++++++---- .../FilterDisplayViewController.swift | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj index 8f8405f8..0dfc5aa4 100644 --- a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj +++ b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj @@ -209,6 +209,7 @@ TargetAttributes = { BC0037B6195CA11B00B9D651 = { CreatedOnToolsVersion = 6.0; + DevelopmentTeam = C9FPMJGKC4; LastSwiftMigration = 0800; ProvisioningStyle = Automatic; }; @@ -219,6 +220,7 @@ developmentRegion = English; hasScannedForEncodings = 0; knownRegions = ( + English, en, Base, ); @@ -420,12 +422,13 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = C9FPMJGKC4; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = 
FilterShowcase; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 4.0; }; name = Debug; }; @@ -434,13 +437,14 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = C9FPMJGKC4; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = FilterShowcase; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; - SWIFT_VERSION = 3.0; + SWIFT_VERSION = 4.0; }; name = Release; }; diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift index 00323443..81727d19 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -15,7 +15,7 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega required init(coder aDecoder: NSCoder) { do { - videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset640x480, location:.backFacing) + videoCamera = try Camera(sessionPreset:AVCaptureSession.Preset.vga640x480, location:.backFacing) videoCamera!.runBenchmark = true } catch { videoCamera = nil From fa960422fe86ca0a3533356360ef366dd554f813 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 22 Apr 2019 21:58:56 +0800 Subject: [PATCH 130/332] Try reuse buffer for crop filter --- framework/Source/Operations/Crop.swift | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index f0113da8..7118af33 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -28,8 +28,15 @@ open class Crop: 
BasicOperation { normalizedOffsetFromOrigin = Position.zero } let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) - - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) + + let bufferSize:GLSize + if abs(abs(Double(inputSize.width)/Double(inputSize.height)) - abs(Double(finalCropSize.width)/Double(finalCropSize.height))) < 0.01 { + bufferSize = inputSize + } else { + bufferSize = finalCropSize + } + + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:bufferSize, stencil:false) let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) From 62fb8caca60bf3f9f45f8273e0221fe2c1135d15 Mon Sep 17 00:00:00 2001 From: RoCry Date: Tue, 23 Apr 2019 14:33:29 +0800 Subject: [PATCH 131/332] Refactor ResizeCrop --- .../FilterShowcase/FilterOperations.swift | 10 +++ framework/GPUImage.xcodeproj/project.pbxproj | 7 ++ framework/Source/Operations/ResizeCrop.swift | 64 ++++++++++--------- 3 files changed, 51 insertions(+), 30 deletions(-) diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index 3440e17f..7af4b396 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -164,6 +164,16 @@ let filterOperations: Array = [ }, filterOperationType:.singleInput ), + FilterOperation( + filter:{ResizeCrop()}, + listName:"ResizeCrop", + titleName:"ResizeCrop", + sliderConfiguration:.enabled(minimumValue:240.0, 
maximumValue:480.0, initialValue:240.0), + sliderUpdateCallback:{(filter, sliderValue) in + filter.cropSizeInPixels = Size(width:480.0, height:sliderValue) + }, + filterOperationType:.singleInput + ), FilterOperation( filter:{Luminance()}, listName:"Masking", diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 9c786b21..1a5322d0 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -361,6 +361,8 @@ BCFF46FC1CBAF85000A0C521 /* TransformOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFF46FB1CBAF85000A0C521 /* TransformOperation.swift */; }; BCFF46FE1CBB0C1F00A0C521 /* AverageColorExtractor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFF46FD1CBB0C1F00A0C521 /* AverageColorExtractor.swift */; }; BCFF47081CBB443B00A0C521 /* CameraConversion.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFF47071CBB443B00A0C521 /* CameraConversion.swift */; }; + D1D81C8E226EE95D00013E68 /* ResizeCrop.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */; }; + D1D81C8F226EE99000013E68 /* ResizeCrop.swift in Sources */ = {isa = PBXBuildFile; fileRef = D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -705,6 +707,7 @@ BCFF46FF1CBB0D8900A0C521 /* AverageColor_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = AverageColor_GL.fsh; path = Source/Operations/Shaders/AverageColor_GL.fsh; sourceTree = ""; }; BCFF47001CBB0D8900A0C521 /* AverageColor.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = AverageColor.vsh; path = Source/Operations/Shaders/AverageColor.vsh; sourceTree = ""; }; BCFF47071CBB443B00A0C521 /* CameraConversion.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = 
CameraConversion.swift; path = Source/CameraConversion.swift; sourceTree = ""; }; + D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ResizeCrop.swift; path = Source/Operations/ResizeCrop.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -922,6 +925,7 @@ BCFF46AD1CB7554700A0C521 /* AdaptiveThreshold.swift */, BCFF46AF1CB7561D00A0C521 /* AdaptiveThreshold_GL.fsh */, BC2C48031CB80E860085E4BC /* Crop.swift */, + D1D81C8D226EE95C00013E68 /* ResizeCrop.swift */, BCFF46C51CB968DE00A0C521 /* ImageBuffer.swift */, BCFF46C71CB96AB100A0C521 /* LowPassFilter.swift */, BCFF46C91CB96BD700A0C521 /* HighPassFilter.swift */, @@ -1350,6 +1354,7 @@ developmentRegion = English; hasScannedForEncodings = 0; knownRegions = ( + English, en, ); mainGroup = BC6E7CA11C39A9D8006DF678; @@ -1438,6 +1443,7 @@ BCFF46E21CBADB3E00A0C521 /* SingleComponentGaussianBlur.swift in Sources */, BC7FD0F71CB0620E00037949 /* ChromaKeyBlend.swift in Sources */, BC7FD0861CA62E1100037949 /* BrightnessAdjustment.swift in Sources */, + D1D81C8E226EE95D00013E68 /* ResizeCrop.swift in Sources */, BC7FD1631CB17C8D00037949 /* ImageOrientation.swift in Sources */, BC9673411C8B897100FB64C2 /* FramebufferCache.swift in Sources */, BC7FD0BB1CA7799B00037949 /* Halftone.swift in Sources */, @@ -1707,6 +1713,7 @@ BC9E35CA1E5257F100B8604F /* LightenBlend.swift in Sources */, BC9E35291E524D5B00B8604F /* MovieOutput.swift in Sources */, BC9E356B1E5256C500B8604F /* ColorInversion.swift in Sources */, + D1D81C8F226EE99000013E68 /* ResizeCrop.swift in Sources */, BC9E35621E5256A500B8604F /* OperationGroup.swift in Sources */, BC9E35381E524D7E00B8604F /* OpenGLRendering.swift in Sources */, BC9E35B41E5257A900B8604F /* ToonFilter.swift in Sources */, diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift index 27703969..ce077499 100644 
--- a/framework/Source/Operations/ResizeCrop.swift +++ b/framework/Source/Operations/ResizeCrop.swift @@ -1,10 +1,3 @@ -// -// ResizeCrop.swift -// Alamofire -// -// Created by rocry on 5/14/18. -// - open class ResizeCrop: BasicOperation { public var cropSizeInPixels: Size? @@ -14,37 +7,48 @@ open class ResizeCrop: BasicOperation { override open func renderFrame() { let inputFramebuffer:Framebuffer = inputFramebuffers[0]! - let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) + let inputGLSize = inputFramebuffer.sizeForTargetOrientation(.portrait) + let inputSize = Size(inputGLSize) + + let (normalizedOffsetFromOrigin, finalCropSize) = calculateFinalFrame(inputSize: inputSize) + let normalizedCropSize = Size(width: finalCropSize.width / inputSize.width, height: finalCropSize.height / inputSize.height) + + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties( + orientation: .portrait, + size: GLSize(finalCropSize), + stencil: false) - let finalCropSize:GLSize - let normalizedOffsetFromOrigin:Position + let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) + + renderFramebuffer.activateFramebufferForRendering() + clearFramebufferWithColor(backgroundColor) + renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) + releaseIncomingFramebuffers() + } + + public func calculateFinalFrame(inputSize: Size) -> (Position, Size) { + let finalCropSize: Size + let normalizedOffsetFromOrigin: Position + if let cropSize = cropSizeInPixels { - let glCropSize: GLSize - - let ratioW = cropSize.width / Float(inputSize.width) - let ratioH = cropSize.height / Float(inputSize.height) + let glCropSize: Size + 
+ let ratioW = cropSize.width / inputSize.width + let ratioH = cropSize.height / inputSize.height if ratioW > ratioH { - glCropSize = GLSize(width: inputSize.width, height: GLint(Float(inputSize.width) * (cropSize.height / cropSize.width))) + glCropSize = Size(width: inputSize.width, height: inputSize.width * (cropSize.height / cropSize.width)) } else { - glCropSize = GLSize(width: GLint(Float(inputSize.height) * (cropSize.width / cropSize.height)), height: inputSize.height) + glCropSize = Size(width: inputSize.height * (cropSize.width / cropSize.height), height: inputSize.height) } - - finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) - normalizedOffsetFromOrigin = Position(Float(inputSize.width / 2 - finalCropSize.width / 2) / Float(inputSize.width), - Float(inputSize.height / 2 - finalCropSize.height / 2) / Float(inputSize.height)) + + finalCropSize = Size(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + normalizedOffsetFromOrigin = Position((inputSize.width / 2 - finalCropSize.width / 2) / inputSize.width, + (inputSize.height / 2 - finalCropSize.height / 2) / inputSize.height) } else { finalCropSize = inputSize normalizedOffsetFromOrigin = Position.zero } - let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) - - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) - - let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) - - renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(backgroundColor) - 
renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) - releaseIncomingFramebuffers() + + return (normalizedOffsetFromOrigin, finalCropSize) } } From 851c845f9f87be3824fb554fb4753f41f31d9556 Mon Sep 17 00:00:00 2001 From: RoCry Date: Tue, 23 Apr 2019 17:22:40 +0800 Subject: [PATCH 132/332] Export newFramebufferAvailable in OperationGroup --- framework/Source/OperationGroup.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index 634c7219..b5ffcaae 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -9,7 +9,7 @@ open class OperationGroup: ImageProcessingOperation { public init() { } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + open func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex:fromSourceIndex) } From 3ec5270c11981ecb869be7f9ba0ec5f061f7bd6f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 24 Apr 2019 20:54:07 +0800 Subject: [PATCH 133/332] update MovieOutput audio start logic --- framework/Source/iOS/MovieOutput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 95f0e17a..0eedd1c1 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -425,7 +425,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Audio support public func activateAudioTrack() throws { - guard assetWriter.status == .unknown else { + guard assetWriter.status != .completed else { throw MovieOutputError.activeAudioTrackError } assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, 
sourceFormatHint:self.audioSourceFormatHint) From 8bb29f68c11ea172688db2f22e5063cea84f571b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 1 May 2019 17:16:46 +0800 Subject: [PATCH 134/332] improve(MovieOutput): add recordedDuration, waitEncodingForLiveVideo and error delegate --- framework/Source/iOS/MovieOutput.swift | 41 ++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 0eedd1c1..213f8f0a 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -9,6 +9,12 @@ public protocol AudioEncodingTarget { public protocol MovieOutputDelegate: class { func movieOutputDidStartWriting(_ movieOutput: MovieOutput, at time: CMTime) + func movieOutputWriterError(_ movieOutput: MovieOutput, error: Error) +} + +public extension MovieOutputDelegate { + func movieOutputDidStartWriting(_ movieOutput: MovieOutput, at time: CMTime) {} + func movieOutputWriterError(_ movieOutput: MovieOutput, error: Error) {} } public enum MovieOutputError: Error, CustomStringConvertible { @@ -49,6 +55,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private var isRecording = false var videoEncodingIsFinished = false var audioEncodingIsFinished = false + private var startFrameTime: CMTime? + public var recordedDuration: CMTime? private var previousFrameTime: CMTime? var encodingLiveVideo:Bool { didSet { @@ -59,6 +67,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var pendingAudioBuffers = [CMSampleBuffer]() public private(set) var pixelBuffer:CVPixelBuffer? = nil public var dropFirstFrames: Int = 0 + public var waitUtilDataIsReadyForLiveVideo = false let keepLastPixelBuffer: Bool var renderFramebuffer:Framebuffer! 
@@ -69,10 +78,15 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var synchronizedEncodingDebug = false var totalFramesAppended:Int = 0 + private var observations = [NSKeyValueObservation]() deinit { + observations.forEach { $0.invalidate() } debugPrint("movie output deinit \(assetWriter.outputURL)") } + var shouldWaitForEncoding: Bool { + return !encodingLiveVideo || waitUtilDataIsReadyForLiveVideo + } public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil, keepLastPixelBuffer: Bool = false) throws { @@ -140,6 +154,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. let block = { () -> Void in do { + guard self.assetWriter.status != .cancelled else { + completionCallback?(false, MovieOutputError.startWritingError(assetWriterError: nil)) + return + } + + let observation = self.assetWriter.observe(\.error) { [weak self] writer, _ in + guard let self = self, let error = writer.error else { return } + self.delegate?.movieOutputWriterError(self, error: error) + } + self.observations.append(observation) + var success = false try NSObject.catchException { success = self.assetWriter.startWriting() @@ -202,6 +227,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } self.pendingAudioBuffers.removeAll() + if let lastFrame = self.previousFrameTime, let startFrame = self.startFrameTime { + self.recordedDuration = lastFrame - startFrame + } self.assetWriter.finishWriting { completionCallback?() } @@ -262,12 +290,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.previousFrameTime = frameTime - guard (self.assetWriterVideoInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else 
{ + guard (self.assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { print("Had to drop a frame at time \(frameTime)") return } - while(!self.assetWriterVideoInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.videoEncodingIsFinished) { + while(!self.assetWriterVideoInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.videoEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Video waiting...") // Better to poll isReadyForMoreMediaData often since when it does become true // we don't want to risk letting framebuffers pile up in between poll intervals. @@ -378,11 +406,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. self.assetWriter.startSession(atSourceTime: frameTime) self.delegate?.movieOutputDidStartWriting(self, at: frameTime) + self.startFrameTime = frameTime } self.previousFrameTime = frameTime - guard (self.assetWriterVideoInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { + guard (self.assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { print("Had to drop a frame at time \(frameTime)") return } @@ -392,7 +421,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } - while(!self.assetWriterVideoInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.videoEncodingIsFinished) { + while(!self.assetWriterVideoInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.videoEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Video waiting...") // Better to poll isReadyForMoreMediaData often since when it does become true // we don't want to risk letting framebuffers pile up in between poll intervals. 
@@ -453,12 +482,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - guard (assetWriterAudioInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { + guard (assetWriterAudioInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { print("Had to drop a audio sample at time \(currentSampleTime)") return } - while(!assetWriterAudioInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.audioEncodingIsFinished) { + while(!assetWriterAudioInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.audioEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Audio waiting...") usleep(100000) } From 0e1865735d5a8ff6553b842a592e7754549ea5a0 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 1 May 2019 18:04:33 +0800 Subject: [PATCH 135/332] improve(Camera): fix force unwrap crash --- framework/Source/iOS/Camera.swift | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index e17cd283..f0d1e497 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -301,16 +301,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let notFrameDrop = dontDropFrames guard notFrameDrop || (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } - - let startTime = CFAbsoluteTimeGetCurrent() - - let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer)! 
- let bufferWidth = CVPixelBufferGetWidth(cameraFrame) - let bufferHeight = CVPixelBufferGetHeight(cameraFrame) - let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) sharedImageProcessingContext.runOperationAsynchronously{ + defer { + if !notFrameDrop { + self.frameRenderingSemaphore.signal() + } + } + let startTime = CFAbsoluteTimeGetCurrent() + guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("Warning: cannot get imageBuffer") + return + } + let bufferWidth = CVPixelBufferGetWidth(cameraFrame) + let bufferHeight = CVPixelBufferGetHeight(cameraFrame) + let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) let cameraFramebuffer:Framebuffer self.delegate?.didCaptureBuffer(sampleBuffer) @@ -393,10 +399,6 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.framesSinceLastCheck += 1 } - - if !notFrameDrop { - self.frameRenderingSemaphore.signal() - } } } From a62303cfecc9b8e127a2b0ff3614f272a9ead0da Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 5 May 2019 15:58:00 +0800 Subject: [PATCH 136/332] Fix crash when calling OpenGL when app is not foreground --- framework/Source/iOS/RenderView.swift | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index c6f442b3..2f034d18 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -24,6 +24,7 @@ public class RenderView:UIView, ImageConsumer { var displayFramebuffer:GLuint? var displayRenderbuffer:GLuint? 
var backingSize = GLSize(width:0, height:0) + private var isAppForeground: Bool = true private lazy var displayShader:ShaderProgram = { return sharedImageProcessingContext.passthroughShader @@ -67,6 +68,13 @@ public class RenderView:UIView, ImageConsumer { eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value:false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] eaglLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill // Just for safety to prevent distortion + NotificationCenter.default.addObserver(forName: UIApplication.didBecomeActiveNotification, object: nil, queue: .main) { [weak self] _ in + self?.isAppForeground = true + } + NotificationCenter.default.addObserver(forName: UIApplication.didEnterBackgroundNotification, object: nil, queue: .main) { [weak self] _ in + self?.isAppForeground = false + } + self.internalLayer = eaglLayer } @@ -77,6 +85,9 @@ public class RenderView:UIView, ImageConsumer { } func createDisplayFramebuffer() -> Bool { + // Fix crash when calling OpenGL when app is not foreground + guard isAppForeground else { return false } + var newDisplayFramebuffer:GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer @@ -157,6 +168,9 @@ public class RenderView:UIView, ImageConsumer { } let work: () -> Void = { + // Fix crash when calling OpenGL when app is not foreground + guard self.isAppForeground else { return } + if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { cleanup() // Bail if we couldn't successfully create the displayFramebuffer From d92934bce79dbb645208de40e1427001f4c555e8 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 5 May 2019 23:37:08 +0800 Subject: [PATCH 137/332] improve MoviePlayer --- framework/Source/iOS/MoviePlayer.swift | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 
1fc99bc8..24818303 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -55,7 +55,6 @@ public class MoviePlayer: AVPlayer, ImageSource { return status == .readyToPlay } - var movieFramebuffer: Framebuffer? var framebufferUserInfo: [AnyHashable:Any]? var observations = [NSKeyValueObservation]() @@ -101,7 +100,6 @@ public class MoviePlayer: AVPlayer, ImageSource { deinit { debugPrint("movie player deinit \(String(describing: asset))") stop() - movieFramebuffer?.unlock() _removePlayerObservers() } @@ -127,6 +125,9 @@ public class MoviePlayer: AVPlayer, ImageSource { item.audioTimePitchAlgorithm = .varispeed self.videoOutput = videoOutput _setupPlayerObservers() + } else { + self.videoOutput = nil + _removePlayerObservers() } super.replaceCurrentItem(with: item) @@ -220,14 +221,6 @@ public class MoviePlayer: AVPlayer, ImageSource { // Not needed for movie inputs } - func transmitPreviousFrame() { - sharedImageProcessingContext.runOperationAsynchronously { - if let movieFramebuffer = self.movieFramebuffer { - self.updateTargetsWithFramebuffer(movieFramebuffer) - } - } - } - public func addTimeObserver(seconds: TimeInterval, callback: @escaping MoviePlayerTimeObserverCallback) -> MoviePlayerTimeObserver { let timeObserver = MoviePlayerTimeObserver(targetTime: seconds, callback: callback) totalTimeObservers.append(timeObserver) @@ -245,6 +238,13 @@ public class MoviePlayer: AVPlayer, ImageSource { return observer.observerID == timeObserver.observerID } } + + public func removeAllTimeObservers() { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?.timeObserversQueue.removeAll() + self?.totalTimeObservers.removeAll() + } + } } private extension MoviePlayer { @@ -282,7 +282,7 @@ private extension MoviePlayer { func _resetTimeObservers() { timeObserversQueue.removeAll() for observer in totalTimeObservers { - guard observer.targetTime >= startTime ?? 0 && observer.targetTime <= endTime ?? 
assetDuration else { + guard observer.targetTime >= (startTime ?? 0) && observer.targetTime <= endTime ?? assetDuration else { continue } timeObserversQueue.append(observer) @@ -389,7 +389,6 @@ private extension MoviePlayer { return } - movieFramebuffer?.unlock() let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) framebuffer.lock() @@ -402,7 +401,6 @@ private extension MoviePlayer { framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(sampleTime)) framebuffer.userInfo = framebufferUserInfo - movieFramebuffer = framebuffer updateTargetsWithFramebuffer(framebuffer) From 8a0fbd5932a7943e513b00b4e0de05c39ddb60c7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 9 May 2019 21:18:11 +0800 Subject: [PATCH 138/332] make Framebuffer lock public --- framework/Source/Framebuffer.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 94a86fa7..3a952807 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -170,7 +170,7 @@ public class Framebuffer { weak var cache:FramebufferCache? 
var framebufferRetainCount = 0 - func lock() { + public func lock() { framebufferRetainCount += 1 } From 0dc039e460dc9c08f11d882acf4caf775fe78c60 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 10 May 2019 13:57:17 +0800 Subject: [PATCH 139/332] fix ResizeCrop cannot use cropSize as final output size --- framework/Source/Operations/ResizeCrop.swift | 35 ++++++++++++++------ 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift index ce077499..404d80f4 100644 --- a/framework/Source/Operations/ResizeCrop.swift +++ b/framework/Source/Operations/ResizeCrop.swift @@ -1,4 +1,5 @@ open class ResizeCrop: BasicOperation { + public var useCropSizeAsFinal = false public var cropSizeInPixels: Size? public init() { @@ -10,8 +11,7 @@ open class ResizeCrop: BasicOperation { let inputGLSize = inputFramebuffer.sizeForTargetOrientation(.portrait) let inputSize = Size(inputGLSize) - let (normalizedOffsetFromOrigin, finalCropSize) = calculateFinalFrame(inputSize: inputSize) - let normalizedCropSize = Size(width: finalCropSize.width / inputSize.width, height: finalCropSize.height / inputSize.height) + let (normalizedOffsetFromOrigin, finalCropSize, normalizedCropSize) = calculateFinalFrame(inputSize: inputSize) renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties( orientation: .portrait, @@ -26,29 +26,42 @@ open class ResizeCrop: BasicOperation { releaseIncomingFramebuffers() } - public func calculateFinalFrame(inputSize: Size) -> (Position, Size) { + public func calculateFinalFrame(inputSize: Size) -> (Position, Size, Size) { let finalCropSize: Size + let normalizedCropSize: Size let normalizedOffsetFromOrigin: Position if let cropSize = cropSizeInPixels { let glCropSize: Size - let ratioW = cropSize.width / inputSize.width - let ratioH = cropSize.height / inputSize.height - if ratioW > ratioH { - glCropSize = Size(width: inputSize.width, 
height: inputSize.width * (cropSize.height / cropSize.width)) + if useCropSizeAsFinal { + // finalCropSize might be resized + glCropSize = cropSize } else { - glCropSize = Size(width: inputSize.height * (cropSize.width / cropSize.height), height: inputSize.height) + // finalCropSize won't be resized + let ratioW = cropSize.width / inputSize.width + let ratioH = cropSize.height / inputSize.height + if ratioW > ratioH { + glCropSize = Size(width: inputSize.width, height: inputSize.width * (cropSize.height / cropSize.width)) + } else { + glCropSize = Size(width: inputSize.height * (cropSize.width / cropSize.height), height: inputSize.height) + } } finalCropSize = Size(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) - normalizedOffsetFromOrigin = Position((inputSize.width / 2 - finalCropSize.width / 2) / inputSize.width, - (inputSize.height / 2 - finalCropSize.height / 2) / inputSize.height) + + // Scale finalCropSize to inputSize to crop original content + let aspectFitRatioToOrigin = min(inputSize.width / finalCropSize.width, inputSize.height / finalCropSize.height) + let cropSizeInOrigin = Size(width: finalCropSize.width * aspectFitRatioToOrigin, height: finalCropSize.height * aspectFitRatioToOrigin) + normalizedCropSize = Size(width: cropSizeInOrigin.width / inputSize.width, height: cropSizeInOrigin.height / inputSize.height) + normalizedOffsetFromOrigin = Position((inputSize.width / 2 - cropSizeInOrigin.width / 2) / inputSize.width, + (inputSize.height / 2 - cropSizeInOrigin.height / 2) / inputSize.height) } else { finalCropSize = inputSize normalizedOffsetFromOrigin = Position.zero + normalizedCropSize = Size(width: 1, height: 1) } - return (normalizedOffsetFromOrigin, finalCropSize) + return (normalizedOffsetFromOrigin, finalCropSize, normalizedCropSize) } } From c76c36ee0556885948d48857d44dabdf02a630b0 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 11 May 2019 01:14:55 +0800 Subject: [PATCH 140/332] fix 
MoviePlayer framebuffer was retained unexpectedly --- framework/Source/iOS/MoviePlayer.swift | 1 - 1 file changed, 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 24818303..8f6f865a 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -390,7 +390,6 @@ private extension MoviePlayer { } let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) - framebuffer.lock() convertYUVToRGB(shader: yuvConversionShader, luminanceFramebuffer: luminanceFramebuffer, From 22a882ad84ad4ed5492fd5fc2a24a2fcb6f5f782 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 13 May 2019 16:48:30 +0800 Subject: [PATCH 141/332] fix(MovieOutput): calling activateAudioTrack crashed when writing --- framework/Source/iOS/MovieOutput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 213f8f0a..9f3a101c 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -454,7 +454,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Audio support public func activateAudioTrack() throws { - guard assetWriter.status != .completed else { + guard assetWriter.status != .writing && assetWriter.status != .completed else { throw MovieOutputError.activeAudioTrackError } assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) From 85e513433bfe6ad782a52ad4f7447fa726c512ee Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 22 May 2019 16:41:16 +0800 Subject: [PATCH 142/332] =?UTF-8?q?fix=20MovieOutput=20didn=E2=80=99t=20sa?= =?UTF-8?q?ve=20startFrameTime=20in=20newFrameAvailable?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- framework/Source/iOS/MovieOutput.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 9f3a101c..933cd105 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -285,6 +285,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. self.assetWriter.startSession(atSourceTime: frameTime) + self.startFrameTime = frameTime self.delegate?.movieOutputDidStartWriting(self, at: frameTime) } @@ -405,8 +406,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. self.assetWriter.startSession(atSourceTime: frameTime) - self.delegate?.movieOutputDidStartWriting(self, at: frameTime) self.startFrameTime = frameTime + self.delegate?.movieOutputDidStartWriting(self, at: frameTime) } self.previousFrameTime = frameTime From 034342aa74bfae562cd9c1bdbdc701f2be2411ea Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 22 May 2019 16:41:55 +0800 Subject: [PATCH 143/332] improve(MoviePlayer): make shader prepare code to async --- framework/Source/iOS/MoviePlayer.swift | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 8f6f865a..cc6fd918 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -39,7 +39,7 @@ public class MoviePlayer: AVPlayer, ImageSource { var videoOutput: AVPlayerItemVideoOutput? var displayLink: CADisplayLink? - let yuvConversionShader: ShaderProgram + var yuvConversionShader: ShaderProgram? 
var totalTimeObservers = [MoviePlayerTimeObserver]() var timeObserversQueue = [MoviePlayerTimeObserver]() @@ -76,25 +76,19 @@ public class MoviePlayer: AVPlayer, ImageSource { public override init() { debugPrint("movie player init") - self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { - try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), - fragmentShader: YUVConversionFullRangeFragmentShader) - } // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) super.init() + _setupShader() } override public init(playerItem item: AVPlayerItem?) { self.playerItem = item - self.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { - try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), - fragmentShader: YUVConversionFullRangeFragmentShader) - } // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) super.init(playerItem: item) replaceCurrentItem(with: item) + _setupShader() } deinit { @@ -248,6 +242,15 @@ public class MoviePlayer: AVPlayer, ImageSource { } private extension MoviePlayer { + func _setupShader() { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { + try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), + fragmentShader: YUVConversionFullRangeFragmentShader) + } + } + } + func _setupDisplayLinkIfNeeded() { if displayLink == nil { displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) @@ -325,6 +328,10 @@ private extension MoviePlayer { // MARK: Internal processing functions func _process(movieFrame: CVPixelBuffer, with sampleTime: CMTime) { + guard let yuvConversionShader = yuvConversionShader else { + debugPrint("ERROR! 
yuvConversionShader hasn't been setup before starting") + return + } delegate?.moviePlayerDidReadPixelBuffer(movieFrame, time: CMTimeGetSeconds(sampleTime)) let bufferHeight = CVPixelBufferGetHeight(movieFrame) From 75c89594fef473f495ec28e2e6d5da82f3cbd59b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 24 May 2019 18:57:23 +0800 Subject: [PATCH 144/332] fix AVAssetWriter error when cancelWriting/startWriting operations are too close --- framework/Source/iOS/MovieOutput.swift | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 933cd105..739c8338 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -39,7 +39,7 @@ public enum MovieOutputError: Error, CustomStringConvertible { } public class MovieOutput: ImageConsumer, AudioEncodingTarget { - + private static let assetWriterQueue = DispatchQueue(label: "com.GPUImage2.MovieOutput.assetWriterQueue", qos: .userInitiated) public let sources = SourceContainer() public let maximumInputs:UInt = 1 @@ -201,12 +201,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if sync { block() } else { - DispatchQueue.global(qos: .userInitiated).async(execute: block) + MovieOutput.assetWriterQueue.async(execute: block) } } public func finishRecording(_ completionCallback:(() -> Void)? = nil) { - movieProcessingContext.runOperationAsynchronously{ + MovieOutput.assetWriterQueue.async { guard self.isRecording, self.assetWriter.status == .writing else { completionCallback?() @@ -239,7 +239,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func cancelRecording(_ completionCallback:(() -> Void)? 
= nil) { - movieProcessingContext.runOperationAsynchronously{ + MovieOutput.assetWriterQueue.async { guard self.isRecording, self.assetWriter.status == .writing else { completionCallback?() From a30ac0768d181d9d11ef2eeebd983b85e147891b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 13 Jun 2019 19:14:10 +0800 Subject: [PATCH 145/332] improve(MovieInput): support exporting without using OpenGL --- framework/Source/iOS/MovieInput.swift | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index a5be3163..d5ff188b 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -75,6 +75,7 @@ public class MovieInput: ImageSource { var audioInputStatusObserver:NSKeyValueObservation? public var useRealtimeThreads = false + public var transcodingOnly = false var timebaseInfo = mach_timebase_info_data_t() var currentThread:Thread? @@ -330,6 +331,11 @@ public class MovieInput: ImageSource { var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical self.synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") + if transcodingOnly, let movieOutput = synchronizedMovieOutput { + movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) + return + } + self.currentTime = currentSampleTime if let startTime = self.startTime { @@ -388,7 +394,7 @@ public class MovieInput: ImageSource { self.synchronizedEncodingDebugPrint("Process audio sample input. 
Time:\(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))") - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: true) + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !transcodingOnly) } func process(movieFrame frame:CMSampleBuffer) { From 5a31aaa0b14a6cc6f5fa814496aa5500be64898d Mon Sep 17 00:00:00 2001 From: jandyx Date: Fri, 14 Jun 2019 19:08:15 +0800 Subject: [PATCH 146/332] improve(camera): add metadataObjectTypes as param to camera init method --- framework/Source/iOS/Camera.swift | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index f0d1e497..e1e4e643 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -141,7 +141,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var captureSessionRestartAttempts = 0 - public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil) throws { + public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? 
= nil) throws { debugPrint("camera init") @@ -216,13 +216,13 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - if let metadataDelegate = metadataDelegate { + if let metadataDelegate = metadataDelegate, let metadataObjectTypes = metadataObjectTypes, !metadataObjectTypes.isEmpty { let captureMetadataOutput = AVCaptureMetadataOutput() if captureSession.canAddOutput(captureMetadataOutput) { captureSession.addOutput(captureMetadataOutput) captureMetadataOutput.setMetadataObjectsDelegate(metadataDelegate, queue: cameraProcessingQueue) - captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr] + captureMetadataOutput.metadataObjectTypes = metadataObjectTypes } } From c5a5d62c920409d904629a7ef14b37e33ebde9f1 Mon Sep 17 00:00:00 2001 From: sunday37zhiyi Date: Tue, 25 Jun 2019 17:50:42 +0800 Subject: [PATCH 147/332] fix(player): resume ability in MoviePlayer --- framework/Source/iOS/MoviePlayer.swift | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index cc6fd918..a2e79d33 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -131,7 +131,11 @@ public class MoviePlayer: AVPlayer, ImageSource { // MARK: Playback control override public func play() { - start() + if displayLink == nil { + start() + } else { + resume() + } } override public func playImmediately(atRate rate: Float) { From 2c6c073424a15d2e782f83c7d4cc718ae59b4151 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 4 Jul 2019 12:25:41 +0800 Subject: [PATCH 148/332] improve(debug): add a debug extension for framebuffer to export to UIImage --- framework/Source/Framebuffer.swift | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 3a952807..8b6bbbae 100755 --- a/framework/Source/Framebuffer.swift +++ 
b/framework/Source/Framebuffer.swift @@ -252,3 +252,28 @@ public extension Size { return GLint(round(Double(self.height))) } } + +#if DEBUG +public extension Framebuffer { + func debugUIImage() -> UIImage? { + let bufferSize = Int(size.width * size.height * 4) + guard let buffer = NSMutableData(capacity: bufferSize) else { return nil } + glReadPixels(0, 0, size.width, size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), buffer.mutableBytes) + let dataProvider = CGDataProvider(dataInfo: nil, data: buffer.mutableBytes, size: bufferSize) {_,_,_ in } + guard let provider = dataProvider else { return nil } + let cgImage = CGImage(width: Int(size.width), + height: Int(size.height), + bitsPerComponent: 8, + bitsPerPixel: 32, + bytesPerRow: 4 * Int(size.width), + space: CGColorSpaceCreateDeviceRGB(), + bitmapInfo: .byteOrder32Big, + provider: provider, + decode: nil, + shouldInterpolate: false, + intent: .defaultIntent) + guard let cgImg = cgImage else { return nil } + return UIImage(cgImage: cgImg) + } +} +#endif From c667ba53a0a622d5139940d8442dc607d6f209b1 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 10 Jul 2019 16:24:30 +0800 Subject: [PATCH 149/332] improve: change yuvCOnversionShader to lazy loading --- framework/Source/iOS/MoviePlayer.swift | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index a2e79d33..7553dbb5 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -39,7 +39,9 @@ public class MoviePlayer: AVPlayer, ImageSource { var videoOutput: AVPlayerItemVideoOutput? var displayLink: CADisplayLink? - var yuvConversionShader: ShaderProgram? + lazy var yuvConversionShader: ShaderProgram? 
= { + _setupShader() + }() var totalTimeObservers = [MoviePlayerTimeObserver]() var timeObserversQueue = [MoviePlayerTimeObserver]() @@ -79,7 +81,6 @@ public class MoviePlayer: AVPlayer, ImageSource { // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) super.init() - _setupShader() } override public init(playerItem item: AVPlayerItem?) { @@ -88,7 +89,6 @@ public class MoviePlayer: AVPlayer, ImageSource { assert(Thread.isMainThread) super.init(playerItem: item) replaceCurrentItem(with: item) - _setupShader() } deinit { @@ -246,13 +246,15 @@ public class MoviePlayer: AVPlayer, ImageSource { } private extension MoviePlayer { - func _setupShader() { - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in - self?.yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { + func _setupShader() -> ShaderProgram? { + var yuvConversionShader: ShaderProgram? + sharedImageProcessingContext.runOperationSynchronously { + yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } } + return yuvConversionShader } func _setupDisplayLinkIfNeeded() { From 702dbf469142f8f6225cde158d2a7abd003a189e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 11 Jul 2019 13:31:16 +0800 Subject: [PATCH 150/332] improve(MoviePlayer): add videoOrientation --- framework/Source/iOS/MoviePlayer.swift | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 7553dbb5..78033005 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -56,6 +56,10 @@ public class MoviePlayer: AVPlayer, ImageSource { public var isReadyToPlay: Bool { return status == .readyToPlay } + public var videoOrientation: ImageOrientation { + guard let asset = 
asset else { return .portrait } + return asset.imageOrientation ?? .portrait + } var framebufferUserInfo: [AnyHashable:Any]? var observations = [NSKeyValueObservation]() @@ -468,3 +472,21 @@ private extension MoviePlayer { } } } + +public extension AVAsset { + var imageOrientation: ImageOrientation? { + guard let videoTrack = tracks(withMediaType: AVMediaType.video).first else { + return nil + } + let trackTransform = videoTrack.preferredTransform + switch (trackTransform.a, trackTransform.b, trackTransform.c, trackTransform.d) { + case (1, 0, 0, 1): return .portrait + case (1, 0, 0, -1): return .portraitUpsideDown + case (0, 1, -1, 0): return .landscapeLeft + case (0, -1, 1, 0): return .landscapeRight + default: + print("ERROR: unsupport transform!\(trackTransform)") + return .portrait + } + } +} From 53fec4f733d2ec8790887478594d97916f144c94 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 20 Aug 2019 00:59:16 +0800 Subject: [PATCH 151/332] improve(movie_player): improve code style and guard stopping not release --- framework/Source/iOS/MoviePlayer.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 78033005..6a7be174 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -31,7 +31,7 @@ public class MoviePlayer: AVPlayer, ImageSource { public weak var delegate: MoviePlayerDelegate? public var startTime: TimeInterval? public var endTime: TimeInterval? - public var loop: Bool = false + public var loop = false public private(set) var asset: AVAsset? public private(set) var isPlaying = false @@ -78,7 +78,7 @@ public class MoviePlayer: AVPlayer, ImageSource { } } var nextSeeking: SeekingInfo? 
- var isSeeking: Bool = false + public var isSeeking = false public override init() { debugPrint("movie player init") @@ -436,6 +436,10 @@ private extension MoviePlayer { } @objc func displayLinkCallback(displayLink: CADisplayLink) { + guard currentItem != nil else { + stop() + return + } sharedImageProcessingContext.runOperationAsynchronously { [weak self] in guard let self = self else { displayLink.invalidate() From 4127ecc7d26df1cf6d335f4c565e17027330ef1e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 21 Aug 2019 17:39:14 +0800 Subject: [PATCH 152/332] fix MoviePlayer framebuffer orientation is wrong when it is not portrait --- framework/Source/iOS/MoviePlayer.swift | 37 +++++++++++++++++++++++--- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 6a7be174..ca2ac0f5 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -354,6 +354,8 @@ private extension MoviePlayer { var luminanceGLTexture: CVOpenGLESTexture? + let originalOrientation = asset?.originalOrientation ?? 
.portrait + glActiveTexture(GLenum(GL_TEXTURE0)) let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) @@ -371,7 +373,11 @@ private extension MoviePlayer { let luminanceFramebuffer: Framebuffer do { - luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) + luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, + orientation: originalOrientation, + size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), + textureOnly: true, + overriddenTexture: luminanceTexture) } catch { print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") return @@ -397,7 +403,7 @@ private extension MoviePlayer { let chrominanceFramebuffer: Framebuffer do { chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, - orientation: .portrait, + orientation: originalOrientation, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture) @@ -406,7 +412,15 @@ private extension MoviePlayer { return } - let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) + let portraitSize: GLSize + switch videoOrientation.rotationNeededForOrientation(.portrait) { + case .noRotation, .rotate180, .flipHorizontally, .flipVertically: + portraitSize = GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)) + case .rotateCounterclockwise, .rotateClockwise, 
.rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: + portraitSize = GLSize(width: GLint(bufferHeight), height: GLint(bufferWidth)) + } + + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: portraitSize, textureOnly: false) convertYUVToRGB(shader: yuvConversionShader, luminanceFramebuffer: luminanceFramebuffer, @@ -493,4 +507,21 @@ public extension AVAsset { return .portrait } } + + // For original orientation is different with preferred image orientation when it is landscape + var originalOrientation: ImageOrientation? { + guard let videoTrack = tracks(withMediaType: AVMediaType.video).first else { + return nil + } + let trackTransform = videoTrack.preferredTransform + switch (trackTransform.a, trackTransform.b, trackTransform.c, trackTransform.d) { + case (1, 0, 0, 1): return .portrait + case (1, 0, 0, -1): return .portraitUpsideDown + case (0, 1, -1, 0): return .landscapeRight + case (0, -1, 1, 0): return .landscapeLeft + default: + print("ERROR: unsupport transform!\(trackTransform)") + return .portrait + } + } } From 518727e0e0d38651e56b5c817eece454d5f7d075 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 22 Aug 2019 01:23:11 +0800 Subject: [PATCH 153/332] refactor: extract YUV framebuffer generating logic to FramebufferGenerator --- .../Source/iOS/FramebufferGenerator.swift | 137 ++++++++++++++++++ framework/Source/iOS/MoviePlayer.swift | 125 ++-------------- 2 files changed, 148 insertions(+), 114 deletions(-) create mode 100644 framework/Source/iOS/FramebufferGenerator.swift diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift new file mode 100644 index 00000000..5e083756 --- /dev/null +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -0,0 +1,137 @@ +// +// FramebufferGenerator.swift +// GPUImage2 +// +// Created by 陈品霖 on 2019/8/22. 
+// + +import CoreMedia + +public class FramebufferGenerator { + lazy var yuvConversionShader = _setupShader() + + public init() { + + } + + public func generateFromPixelBuffer(_ movieFrame: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { + var framebuffer: Framebuffer? + sharedImageProcessingContext.runOperationSynchronously { + framebuffer = _generateFromPixelBuffer(movieFrame, frameTime: frameTime, videoOrientation: videoOrientation) + } + return framebuffer + } +} + +private extension FramebufferGenerator { + func _setupShader() -> ShaderProgram? { + var yuvConversionShader: ShaderProgram? + sharedImageProcessingContext.runOperationSynchronously { + yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { + try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), + fragmentShader: YUVConversionFullRangeFragmentShader) + } + } + return yuvConversionShader + } + + func _generateFromPixelBuffer(_ movieFrame: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { + guard let yuvConversionShader = yuvConversionShader else { + debugPrint("ERROR! yuvConversionShader hasn't been setup before starting") + return nil + } + let originalOrientation = videoOrientation.originalOrientation + let bufferHeight = CVPixelBufferGetHeight(movieFrame) + let bufferWidth = CVPixelBufferGetWidth(movieFrame) + let conversionMatrix = colorConversionMatrix601FullRangeDefault + CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + defer { + CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + } + + glActiveTexture(GLenum(GL_TEXTURE0)) + var luminanceGLTexture: CVOpenGLESTexture? 
+ let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) + if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil { + print("Could not create LuminanceGLTexture") + return nil + } + + let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) + + glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + + let luminanceFramebuffer: Framebuffer + do { + luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, + orientation: originalOrientation, + size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), + textureOnly: true, + overriddenTexture: luminanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return nil + } + + glActiveTexture(GLenum(GL_TEXTURE1)) + var chrominanceGLTexture: CVOpenGLESTexture? + let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) + + if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil { + print("Could not create ChrominanceGLTexture") + return nil + } + + let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) 
+ + glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + + let chrominanceFramebuffer: Framebuffer + do { + chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, + orientation: originalOrientation, + size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), + textureOnly: true, + overriddenTexture: chrominanceTexture) + } catch { + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return nil + } + + let portraitSize: GLSize + switch videoOrientation.rotationNeededForOrientation(.portrait) { + case .noRotation, .rotate180, .flipHorizontally, .flipVertically: + portraitSize = GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)) + case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: + portraitSize = GLSize(width: GLint(bufferHeight), height: GLint(bufferWidth)) + } + + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: portraitSize, textureOnly: false) + + convertYUVToRGB(shader: yuvConversionShader, + luminanceFramebuffer: luminanceFramebuffer, + chrominanceFramebuffer: chrominanceFramebuffer, + resultFramebuffer: framebuffer, + colorConversionMatrix: conversionMatrix) + framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) + return framebuffer + } +} + +public extension ImageOrientation { + var originalOrientation: ImageOrientation { + switch self { + case .portrait, .portraitUpsideDown: + return self + case .landscapeLeft: + return .landscapeRight + case .landscapeRight: + return .landscapeLeft + } + } +} diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 
ca2ac0f5..03b5ab7e 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -39,9 +39,7 @@ public class MoviePlayer: AVPlayer, ImageSource { var videoOutput: AVPlayerItemVideoOutput? var displayLink: CADisplayLink? - lazy var yuvConversionShader: ShaderProgram? = { - _setupShader() - }() + lazy var framebufferGenerator = FramebufferGenerator() var totalTimeObservers = [MoviePlayerTimeObserver]() var timeObserversQueue = [MoviePlayerTimeObserver]() @@ -250,17 +248,6 @@ public class MoviePlayer: AVPlayer, ImageSource { } private extension MoviePlayer { - func _setupShader() -> ShaderProgram? { - var yuvConversionShader: ShaderProgram? - sharedImageProcessingContext.runOperationSynchronously { - yuvConversionShader = crashOnShaderCompileFailure("MoviePlayer") { - try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), - fragmentShader: YUVConversionFullRangeFragmentShader) - } - } - return yuvConversionShader - } - func _setupDisplayLinkIfNeeded() { if displayLink == nil { displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallback)) @@ -338,115 +325,25 @@ private extension MoviePlayer { // MARK: Internal processing functions func _process(movieFrame: CVPixelBuffer, with sampleTime: CMTime) { - guard let yuvConversionShader = yuvConversionShader else { - debugPrint("ERROR! yuvConversionShader hasn't been setup before starting") - return - } delegate?.moviePlayerDidReadPixelBuffer(movieFrame, time: CMTimeGetSeconds(sampleTime)) - let bufferHeight = CVPixelBufferGetHeight(movieFrame) - let bufferWidth = CVPixelBufferGetWidth(movieFrame) - CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - - let conversionMatrix = colorConversionMatrix601FullRangeDefault - let startTime = CFAbsoluteTimeGetCurrent() - - var luminanceGLTexture: CVOpenGLESTexture? - - let originalOrientation = asset?.originalOrientation ?? 
.portrait - - glActiveTexture(GLenum(GL_TEXTURE0)) - - let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) - - if(luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil) { - print("Could not create LuminanceGLTexture") - return - } - - let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) - - glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); - - let luminanceFramebuffer: Framebuffer - do { - luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, - orientation: originalOrientation, - size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), - textureOnly: true, - overriddenTexture: luminanceTexture) - } catch { - print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") - return - } - - var chrominanceGLTexture: CVOpenGLESTexture? - - glActiveTexture(GLenum(GL_TEXTURE1)) - - let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) - - if(chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil) { - print("Could not create ChrominanceGLTexture") - return - } - - let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) 
- - glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); - - let chrominanceFramebuffer: Framebuffer - do { - chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, - orientation: originalOrientation, - size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), - textureOnly: true, - overriddenTexture: chrominanceTexture) - } catch { - print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") - return + if runBenchmark || logEnabled { + totalFramesSent += 1 } - - let portraitSize: GLSize - switch videoOrientation.rotationNeededForOrientation(.portrait) { - case .noRotation, .rotate180, .flipHorizontally, .flipVertically: - portraitSize = GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)) - case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: - portraitSize = GLSize(width: GLint(bufferHeight), height: GLint(bufferWidth)) + defer { + if runBenchmark { + let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) + totalFrameTime += currentFrameTime + print("Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") + print("Current frame time :\(1000.0 * currentFrameTime) ms") + } } - let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: portraitSize, textureOnly: false) - - convertYUVToRGB(shader: yuvConversionShader, - luminanceFramebuffer: luminanceFramebuffer, - chrominanceFramebuffer: chrominanceFramebuffer, - resultFramebuffer: framebuffer, - colorConversionMatrix: conversionMatrix) - CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - - framebuffer.timingStyle = 
.videoFrame(timestamp: Timestamp(sampleTime)) + guard let framebuffer = framebufferGenerator.generateFromPixelBuffer(movieFrame, frameTime: sampleTime, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo updateTargetsWithFramebuffer(framebuffer) - - // Clean up - CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) - - if(runBenchmark || logEnabled) { - totalFramesSent += 1 - } - - if runBenchmark { - let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) - totalFrameTime += currentFrameTime - print("Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") - print("Current frame time :\(1000.0 * currentFrameTime) ms") - } } @objc func displayLinkCallback(displayLink: CADisplayLink) { From 4dbdc6f78598cf62c6cf9d2f3630806892da317e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 22 Aug 2019 19:25:59 +0800 Subject: [PATCH 154/332] feat: add framebuffer convertToPixelBuffer --- .../Source/iOS/FramebufferGenerator.swift | 87 +++++++++++++++++-- framework/Source/iOS/MoviePlayer.swift | 2 +- 2 files changed, 79 insertions(+), 10 deletions(-) diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift index 5e083756..f8fe4bd4 100644 --- a/framework/Source/iOS/FramebufferGenerator.swift +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -9,18 +9,29 @@ import CoreMedia public class FramebufferGenerator { lazy var yuvConversionShader = _setupShader() + private(set) var outputSize: GLSize? + private(set) var pixelBufferPool: CVPixelBufferPool? + private var renderFramebuffer: Framebuffer? public init() { } - public func generateFromPixelBuffer(_ movieFrame: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { + public func generateFromYUVBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { var framebuffer: Framebuffer? 
sharedImageProcessingContext.runOperationSynchronously { - framebuffer = _generateFromPixelBuffer(movieFrame, frameTime: frameTime, videoOrientation: videoOrientation) + framebuffer = _generateFromYUVBuffer(yuvPixelBuffer, frameTime: frameTime, videoOrientation: videoOrientation) } return framebuffer } + + public func convertToPixelBuffer(_ framebuffer: Framebuffer) -> CVPixelBuffer? { + var pixelBuffer: CVPixelBuffer? + sharedImageProcessingContext.runOperationSynchronously { + pixelBuffer = _convertToPixelBuffer(framebuffer) + } + return pixelBuffer + } } private extension FramebufferGenerator { @@ -35,24 +46,24 @@ private extension FramebufferGenerator { return yuvConversionShader } - func _generateFromPixelBuffer(_ movieFrame: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { + func _generateFromYUVBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { guard let yuvConversionShader = yuvConversionShader else { debugPrint("ERROR! 
yuvConversionShader hasn't been setup before starting") return nil } let originalOrientation = videoOrientation.originalOrientation - let bufferHeight = CVPixelBufferGetHeight(movieFrame) - let bufferWidth = CVPixelBufferGetWidth(movieFrame) + let bufferHeight = CVPixelBufferGetHeight(yuvPixelBuffer) + let bufferWidth = CVPixelBufferGetWidth(yuvPixelBuffer) let conversionMatrix = colorConversionMatrix601FullRangeDefault - CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + CVPixelBufferLockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) defer { - CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + CVPixelBufferUnlockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) } glActiveTexture(GLenum(GL_TEXTURE0)) var luminanceGLTexture: CVOpenGLESTexture? - let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) + let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, yuvPixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil { print("Could not create LuminanceGLTexture") return nil @@ -78,7 +89,7 @@ private extension FramebufferGenerator { glActiveTexture(GLenum(GL_TEXTURE1)) var chrominanceGLTexture: CVOpenGLESTexture? 
- let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) + let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, yuvPixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil { print("Could not create ChrominanceGLTexture") @@ -121,6 +132,64 @@ private extension FramebufferGenerator { framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) return framebuffer } + + func _convertToPixelBuffer(_ framebuffer: Framebuffer) -> CVPixelBuffer? { + if pixelBufferPool == nil || outputSize?.width != framebuffer.size.width || outputSize?.height != framebuffer.size.height { + outputSize = framebuffer.size + pixelBufferPool = _createPixelBufferPool(framebuffer.size.width, framebuffer.size.height, FourCharCode(kCVPixelFormatType_32BGRA), 3) + } + guard let pixelBufferPool = pixelBufferPool else { return nil } + var outPixelBuffer: CVPixelBuffer? 
+ let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &outPixelBuffer) + guard let pixelBuffer = outPixelBuffer, pixelBufferStatus == kCVReturnSuccess else { + print("WARNING: Unable to create pixel buffer, dropping frame") + return nil + } + + do { + if renderFramebuffer == nil { + CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) + } + + let bufferSize = framebuffer.size + var cachedTextureRef: CVOpenGLESTexture? + let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) + + renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + + renderFramebuffer?.activateFramebufferForRendering() + clearFramebufferWithColor(Color.black) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) + + glFinish() + } + catch { + print("WARNING: Trouble appending pixel buffer at time: \(framebuffer.timingStyle.timestamp?.seconds() ?? 
0) \(error)") + } + + CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + return pixelBuffer + } + + func _createPixelBufferPool(_ width: Int32, _ height: Int32, _ pixelFormat: FourCharCode, _ maxBufferCount: Int32) -> CVPixelBufferPool? { + var outputPool: CVPixelBufferPool? = nil + + let sourcePixelBufferOptions: NSDictionary = [kCVPixelBufferPixelFormatTypeKey: pixelFormat, + kCVPixelBufferWidthKey: width, + kCVPixelBufferHeightKey: height, + kCVPixelFormatOpenGLESCompatibility: true, + kCVPixelBufferIOSurfacePropertiesKey: NSDictionary()] + + let pixelBufferPoolOptions: NSDictionary = [kCVPixelBufferPoolMinimumBufferCountKey: maxBufferCount] + + CVPixelBufferPoolCreate(kCFAllocatorDefault, pixelBufferPoolOptions, sourcePixelBufferOptions, &outputPool) + + return outputPool + } } public extension ImageOrientation { diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 03b5ab7e..fc526eee 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -340,7 +340,7 @@ private extension MoviePlayer { } } - guard let framebuffer = framebufferGenerator.generateFromPixelBuffer(movieFrame, frameTime: sampleTime, videoOrientation: videoOrientation) else { return } + guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: sampleTime, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo updateTargetsWithFramebuffer(framebuffer) From 76c6982f2717d9a99b1fc7dcf45f33b1ce160c4c Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Mon, 2 Sep 2019 22:44:54 +0800 Subject: [PATCH 155/332] feat(video-streaming): add streaming ability to output file. 
--- framework/Source/iOS/MovieOutput.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 739c8338..ab0104a5 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -104,6 +104,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.size = size assetWriter = try AVAssetWriter(url:URL, fileType:fileType) + assetWriter.shouldOptimizeForNetworkUse = true var localSettings:[String:Any] if let videoSettings = videoSettings { From 5345795b6113e3b7a287838c1eac5efceefc919e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 3 Sep 2019 14:54:41 +0800 Subject: [PATCH 156/332] improve: use video track preferredTransform for assetwriter when transcoding --- framework/Source/iOS/MovieInput.swift | 8 +++++++- framework/Source/iOS/MovieOutput.swift | 5 +++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index d5ff188b..6889f409 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -75,7 +75,13 @@ public class MovieInput: ImageSource { var audioInputStatusObserver:NSKeyValueObservation? public var useRealtimeThreads = false - public var transcodingOnly = false + public var transcodingOnly = false { + didSet { + if transcodingOnly, let movieOutput = synchronizedMovieOutput, let transform = asset.tracks(withMediaType: .video).first?.preferredTransform { + movieOutput.preferredTransform = transform + } + } + } var timebaseInfo = mach_timebase_info_data_t() var currentThread:Thread? 
diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index ab0104a5..56a0f5ac 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -87,6 +87,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var shouldWaitForEncoding: Bool { return !encodingLiveVideo || waitUtilDataIsReadyForLiveVideo } + var preferredTransform: CGAffineTransform? public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil, keepLastPixelBuffer: Bool = false) throws { @@ -166,6 +167,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } self.observations.append(observation) + if let preferredTransform = self.preferredTransform { + self.assetWriterVideoInput.transform = preferredTransform + } + var success = false try NSObject.catchException { success = self.assetWriter.startWriting() From ab9c7533833d6e4e6cebf5f029ab8dee3e6bb1af Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Wed, 4 Sep 2019 17:09:57 +0800 Subject: [PATCH 157/332] feat(frame-rate): drop frame if maxFPS is provided and exceed. --- framework/Source/iOS/MovieInput.swift | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 6889f409..f1ba9cb7 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -73,6 +73,7 @@ public class MovieInput: ImageSource { var readingShouldWait = false var videoInputStatusObserver:NSKeyValueObservation? var audioInputStatusObserver:NSKeyValueObservation? + let maxFPS: Float? public var useRealtimeThreads = false public var transcodingOnly = false { @@ -94,7 +95,7 @@ public class MovieInput: ImageSource { public var framebufferUserInfo:[AnyHashable:Any]? 
// TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, playrate:Double = 1.0, audioSettings:[String:Any]? = nil) throws { + public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, playrate:Double = 1.0, audioSettings:[String:Any]? = nil, maxFPS: Float? = nil) throws { debugPrint("movie input init \(asset)") self.asset = asset @@ -104,6 +105,7 @@ public class MovieInput: ImageSource { self.playrate = playrate self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} self.audioSettings = audioSettings + self.maxFPS = maxFPS } public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false, playrate: Double = 1.0, audioSettings:[String:Any]? = nil) throws { @@ -334,6 +336,9 @@ public class MovieInput: ImageSource { } var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + if let fps = maxFPS, abs(currentSampleTime.seconds.remainder(dividingBy: Double(1 / fps))) > Double(1 / fps / 4) { + return + } var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical self.synchronizedEncodingDebugPrint("Process video frame input. 
Time:\(CMTimeGetSeconds(currentSampleTime))") From 60e5497c12dcec337547fb455355087e623ffbe8 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 11 Sep 2019 21:48:18 +0800 Subject: [PATCH 158/332] improve(MovieInput): support video that original orientation is not portrait --- framework/Source/iOS/MovieInput.swift | 89 +++------------------------ 1 file changed, 10 insertions(+), 79 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index f1ba9cb7..6a0beda0 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -74,6 +74,7 @@ public class MovieInput: ImageSource { var videoInputStatusObserver:NSKeyValueObservation? var audioInputStatusObserver:NSKeyValueObservation? let maxFPS: Float? + lazy var framebufferGenerator = FramebufferGenerator() public var useRealtimeThreads = false public var transcodingOnly = false { @@ -123,6 +124,10 @@ public class MovieInput: ImageSource { self.videoInputStatusObserver?.invalidate() self.audioInputStatusObserver?.invalidate() } + + public var videoOrientation: ImageOrientation { + return asset.imageOrientation ?? 
.portrait + } // MARK: - // MARK: Playback control @@ -416,86 +421,12 @@ public class MovieInput: ImageSource { } func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { - let bufferHeight = CVPixelBufferGetHeight(movieFrame) - let bufferWidth = CVPixelBufferGetWidth(movieFrame) - CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - - let conversionMatrix = colorConversionMatrix601FullRangeDefault - // TODO: Get this color query working - // if let colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) { - // if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) { - // _preferredConversion = kColorConversion601FullRange - // } else { - // _preferredConversion = kColorConversion709 - // } - // } else { - // _preferredConversion = kColorConversion601FullRange - // } - - let startTime = CFAbsoluteTimeGetCurrent() - - var luminanceGLTexture: CVOpenGLESTexture? - - glActiveTexture(GLenum(GL_TEXTURE0)) - - let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) - - if(luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil) { - print("Could not create LuminanceGLTexture") - return - } - - let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) 
- - glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); - - let luminanceFramebuffer: Framebuffer - do { - luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) - } catch { - print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") - return - } - - var chrominanceGLTexture: CVOpenGLESTexture? - - glActiveTexture(GLenum(GL_TEXTURE1)) - - let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) - - if(chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil) { - print("Could not create ChrominanceGLTexture") - return - } - - let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) 
- - glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); - - let chrominanceFramebuffer: Framebuffer - do { - chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture) - } catch { - print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") - return - } - - self.movieFramebuffer?.unlock() - let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) - movieFramebuffer.lock() - - convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) - CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - - movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) - movieFramebuffer.userInfo = self.framebufferUserInfo - self.movieFramebuffer = movieFramebuffer + let startTime = CACurrentMediaTime() - self.updateTargetsWithFramebuffer(movieFramebuffer) + guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) else { return } + framebuffer.userInfo = framebufferUserInfo + self.movieFramebuffer = framebuffer + self.updateTargetsWithFramebuffer(framebuffer) if(self.runBenchmark || self.synchronizedEncodingDebug) { self.totalFramesSent += 1 From af1782441302e83e433bfdda527f522f858802d2 Mon Sep 17 00:00:00 2001 From: Kubrick G Date: 
Wed, 4 Sep 2019 20:27:37 +0800 Subject: [PATCH 159/332] fix(movie-input): fix drop frame logic --- framework/Source/iOS/MovieInput.swift | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index f1ba9cb7..ea9eec0e 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -336,19 +336,19 @@ public class MovieInput: ImageSource { } var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if let fps = maxFPS, abs(currentSampleTime.seconds.remainder(dividingBy: Double(1 / fps))) > Double(1 / fps / 4) { + if let fps = maxFPS, let currentTime = currentTime, (currentSampleTime.seconds - currentTime.seconds) < 1 / Double(fps) - 0.0000001 { return } var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical self.synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") + self.currentTime = currentSampleTime + if transcodingOnly, let movieOutput = synchronizedMovieOutput { movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) return } - self.currentTime = currentSampleTime - if let startTime = self.startTime { // Make sure our samples start at kCMTimeZero if the video was started midway. currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) From 2cd65632ab0c97db4820e0903d554257fe97cffb Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Wed, 11 Sep 2019 21:58:38 +0800 Subject: [PATCH 160/332] chore(drop-frame): add note. 
--- framework/Source/iOS/MovieInput.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index ea9eec0e..322bd793 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -336,6 +336,7 @@ public class MovieInput: ImageSource { } var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + // NOTE: When calculating frame pre second, floating point maybe rounded, so we have to add tolerance manually if let fps = maxFPS, let currentTime = currentTime, (currentSampleTime.seconds - currentTime.seconds) < 1 / Double(fps) - 0.0000001 { return } From 3feafff20ef397440966af7fed35e10ce8f21e17 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 10 Sep 2019 21:52:27 +0800 Subject: [PATCH 161/332] improve(transcoding): parallelize asset reader thread and render thread to improve transcoding performance --- framework/Source/iOS/MovieInput.swift | 102 ++++++++++++++++--------- framework/Source/iOS/MovieOutput.swift | 33 ++++---- 2 files changed, 84 insertions(+), 51 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 322bd793..2ede92e9 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -297,22 +297,32 @@ public class MovieInput: ImageSource { assetReader.cancelReading() - // Since only the main thread will cancel and create threads jump onto it to prevent - // the current thread from being cancelled in between the below if statement and creating the new thread. - DispatchQueue.main.async { - // Start the video over so long as it wasn't cancelled. 
- if (self.loop && !thread.isCancelled) { - self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) - self.currentThread?.start() - } - else { - self.delegate?.didFinishMovie() - self.completion?(nil) + let readerPostAction = { + // Since only the main thread will cancel and create threads jump onto it to prevent + // the current thread from being cancelled in between the below if statement and creating the new thread. + DispatchQueue.main.async { + assetReader.cancelReading() - self.synchronizedEncodingDebugPrint("MovieInput finished reading") - self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") + // Start the video over so long as it wasn't cancelled. + if (self.loop && !thread.isCancelled) { + self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) + self.currentThread?.start() + } + else { + self.synchronizedEncodingDebugPrint("MovieInput finished reading") + self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") + self.delegate?.didFinishMovie() + self.completion?(nil) + } } } + + if synchronizedMovieOutput != nil { + // Make sure all image processing task is finished when encoding + sharedImageProcessingContext.runOperationAsynchronously(readerPostAction) + } else { + readerPostAction() + } } func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { @@ -322,8 +332,7 @@ public class MovieInput: ImageSource { // Documentation: "Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done // appending buffers to it. This is necessary to prevent other inputs from stalling, as they may otherwise wait forever // for that input's media data, attempting to complete the ideal interleaving pattern." 
- movieOutput.videoEncodingIsFinished = true - movieOutput.assetWriterVideoInput.markAsFinished() + movieOutput.markIsFinishedAfterProcessing = true } } return @@ -336,44 +345,63 @@ public class MovieInput: ImageSource { } var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - // NOTE: When calculating frame pre second, floating point maybe rounded, so we have to add tolerance manually - if let fps = maxFPS, let currentTime = currentTime, (currentSampleTime.seconds - currentTime.seconds) < 1 / Double(fps) - 0.0000001 { - return - } - var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical - self.synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") - - self.currentTime = currentSampleTime + currentTime = currentSampleTime if transcodingOnly, let movieOutput = synchronizedMovieOutput { movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) return } - if let startTime = self.startTime { + var duration = asset.duration // Only used for the progress block so its acuracy is not critical + if let startTime = startTime { // Make sure our samples start at kCMTimeZero if the video was started midway. 
currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) - if let trimmedDuration = self.trimmedDuration, startTime.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= duration { + if let trimmedDuration = trimmedDuration, startTime.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= duration { duration = trimmedDuration } else { duration = CMTimeSubtract(duration, startTime) } } - if (self.playAtActualSpeed) { + // NOTE: When calculating frame pre second, floating point maybe rounded, so we have to add tolerance manually + if let fps = maxFPS, let currentTime = currentTime, (currentSampleTime.seconds - currentTime.seconds) < 1 / Double(fps) - 0.0000001 { + return + } + + progress?(currentSampleTime.seconds/duration.seconds) + + if synchronizedMovieOutput != nil { + // For synchrozied transcoding, separate AVAssetReader thread and OpenGL thread to improve performance + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?.processNextVideoSampleOnGLThread(sampleBuffer, currentSampleTime: currentSampleTime) + CMSampleBufferInvalidate(sampleBuffer) + } + } else { + processNextVideoSampleOnGLThread(sampleBuffer, currentSampleTime: currentSampleTime) + CMSampleBufferInvalidate(sampleBuffer) + } + } + + func processNextVideoSampleOnGLThread(_ sampleBuffer: CMSampleBuffer, currentSampleTime: CMTime) { + + synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") + + if playAtActualSpeed { let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000 / playrate) let currentActualTime = DispatchTime.now() - if(self.actualStartTime == nil) { self.actualStartTime = currentActualTime } + if actualStartTime == nil { + actualStartTime = currentActualTime + } // Determine how much time we need to wait in order to display the frame at the right currentActualTime such that it will match the currentSampleTime. 
// The reason we subtract the actualStartTime from the currentActualTime is so the actual time starts at zero relative to the video start. - let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) + let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds - actualStartTime!.uptimeNanoseconds) //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") - if(delay > 0) { - mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(delay))) + if delay > 0 { + mach_wait_until(mach_absolute_time() + nanosToAbs(UInt64(delay))) } else { // This only happens if we aren't given enough processing time for playback @@ -385,11 +413,8 @@ public class MovieInput: ImageSource { } } - self.progress?(currentSampleTime.seconds/duration.seconds) - - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.process(movieFrame:sampleBuffer) - CMSampleBufferInvalidate(sampleBuffer) } } @@ -406,7 +431,14 @@ public class MovieInput: ImageSource { self.synchronizedEncodingDebugPrint("Process audio sample input. 
Time:\(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))") - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !transcodingOnly) + if synchronizedMovieOutput != nil { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + guard let self = self else { return } + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !self.transcodingOnly) + } + } else { + audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !transcodingOnly) + } } func process(movieFrame frame:CMSampleBuffer) { diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 56a0f5ac..25de45fb 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -55,6 +55,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private var isRecording = false var videoEncodingIsFinished = false var audioEncodingIsFinished = false + var markIsFinishedAfterProcessing = false private var startFrameTime: CMTime? public var recordedDuration: CMTime? private var previousFrameTime: CMTime? @@ -267,7 +268,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); - let work = { + let work = { [weak self] in + guard let self = self else { return } // Discard first n frames if self.dropFirstFrames > 0 { self.dropFirstFrames -= 1 @@ -307,6 +309,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Better to poll isReadyForMoreMediaData often since when it does become true // we don't want to risk letting framebuffers pile up in between poll intervals. 
usleep(100000) // 0.1 seconds + if self.markIsFinishedAfterProcessing { + self.synchronizedEncodingDebugPrint("set videoEncodingIsFinished to true after processing") + self.markIsFinishedAfterProcessing = false + self.videoEncodingIsFinished = true + } } if self.keepLastPixelBuffer { @@ -345,6 +352,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { sharedImageProcessingContext.runOperationAsynchronously { framebuffer.unlock() } + if self.videoEncodingIsFinished { + self.assetWriterVideoInput.markAsFinished() + } } if(self.encodingLiveVideo) { @@ -472,12 +482,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let work = { - var shouldInvalidate = shouldInvalidateSampleWhenDone - defer { - if(shouldInvalidate) { - CMSampleBufferInvalidate(sampleBuffer) - } - } + self.pendingAudioBuffers.append(sampleBuffer) guard self.isRecording, self.assetWriter.status == .writing, @@ -499,10 +504,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { usleep(100000) } - self.pendingAudioBuffers.append(sampleBuffer) guard self.previousFrameTime != nil else { self.synchronizedEncodingDebugPrint("Add audio sample to pending queue but first video frame is not ready yet. 
Time:\(CMTimeGetSeconds(currentSampleTime))") - shouldInvalidate = false return } @@ -510,11 +513,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try NSObject.catchException { - while self.pendingAudioBuffers.count > 0 { - let audioBuffer = self.pendingAudioBuffers.removeFirst() + while let audioBuffer = self.pendingAudioBuffers.first { if (!assetWriterAudioInput.append(audioBuffer)) { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") } + self.pendingAudioBuffers.removeFirst() + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } } } } @@ -527,11 +533,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { movieProcessingContext.runOperationAsynchronously(work) } else { - // Process pending audio buffers at first - while pendingAudioBuffers.count > 0 { - let audioBuffer = pendingAudioBuffers.removeFirst() - processAudioBuffer(audioBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) - } work() } } From 24818bd2ff8e04f91236940e3d6e57cd7fe1db37 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 10 Sep 2019 23:30:11 +0800 Subject: [PATCH 162/332] improve(MovieInput): support pause and resume --- framework/Source/iOS/MovieInput.swift | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 2ede92e9..b4ae35a3 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -162,6 +162,20 @@ public class MovieInput: ImageSource { self.requestedStartTime = self.currentTime } + public func pauseWithoutCancel() { + requestedStartTime = currentTime + conditionLock.lock() + readingShouldWait = true + conditionLock.unlock() + } + + public func resume() { + conditionLock.lock() + readingShouldWait = false + conditionLock.signal() + conditionLock.unlock() + } + // MARK: - // MARK: Internal processing functions From 
00ab883c46fe5e4be690c1b9821c27ae989a7301 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 18 Sep 2019 15:58:02 +0800 Subject: [PATCH 163/332] fix(MovieOutput): fix audio sample buffer is not released correctly --- framework/Source/iOS/MovieOutput.swift | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 25de45fb..12b3c36b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -482,13 +482,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let work = { - self.pendingAudioBuffers.append(sampleBuffer) - guard self.isRecording, self.assetWriter.status == .writing, !self.audioEncodingIsFinished, let assetWriterAudioInput = self.assetWriterAudioInput else { self.synchronizedEncodingDebugPrint("Guard fell through, dropping audio sample") + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } return } @@ -496,9 +497,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard (assetWriterAudioInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { print("Had to drop a audio sample at time \(currentSampleTime)") + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } return } + self.pendingAudioBuffers.append(sampleBuffer) + while(!assetWriterAudioInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.audioEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Audio waiting...") usleep(100000) @@ -519,7 +525,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } self.pendingAudioBuffers.removeFirst() if shouldInvalidateSampleWhenDone { - CMSampleBufferInvalidate(sampleBuffer) + CMSampleBufferInvalidate(audioBuffer) } } } From 110622c149c451848b3fecfdb07f923b0af1ab7e Mon Sep 17 00:00:00 2001 
From: Pinlin Date: Fri, 20 Sep 2019 18:29:53 +0800 Subject: [PATCH 164/332] fix(transcoding): fix audio is missing the last part --- framework/Source/iOS/MovieInput.swift | 14 ++++++-------- framework/Source/iOS/MovieOutput.swift | 3 ++- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 417dc90c..63e3f3d4 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -389,9 +389,9 @@ public class MovieInput: ImageSource { progress?(currentSampleTime.seconds/duration.seconds) - if synchronizedMovieOutput != nil { + if let movieOutput = synchronizedMovieOutput { // For synchrozied transcoding, separate AVAssetReader thread and OpenGL thread to improve performance - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + movieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?.processNextVideoSampleOnGLThread(sampleBuffer, currentSampleTime: currentSampleTime) CMSampleBufferInvalidate(sampleBuffer) } @@ -432,9 +432,7 @@ public class MovieInput: ImageSource { } } - sharedImageProcessingContext.runOperationSynchronously { - self.process(movieFrame:sampleBuffer) - } + process(movieFrame:sampleBuffer) } func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { @@ -450,8 +448,8 @@ public class MovieInput: ImageSource { self.synchronizedEncodingDebugPrint("Process audio sample input. 
Time:\(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))") - if synchronizedMovieOutput != nil { - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + if let movieOutput = self.synchronizedMovieOutput { + movieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in guard let self = self else { return } self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !self.transcodingOnly) } @@ -535,7 +533,7 @@ public class MovieInput: ImageSource { self.conditionLock.lock() // Allow reading if either input is able to accept data, prevent reading if both inputs are unable to accept data. - if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false) { + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData == true) { self.readingShouldWait = false self.conditionLock.signal() } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 12b3c36b..24866029 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -520,8 +520,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try NSObject.catchException { while let audioBuffer = self.pendingAudioBuffers.first { - if (!assetWriterAudioInput.append(audioBuffer)) { + guard assetWriterAudioInput.append(audioBuffer) else { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + break } self.pendingAudioBuffers.removeFirst() if shouldInvalidateSampleWhenDone { From bf6bedbd5dabdf41bc6bf6bc4ddc3fd23f54216f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 21 Sep 2019 01:29:44 +0800 Subject: [PATCH 165/332] fix: revert commit 110622c, using a better fix for audio missing last part --- framework/Source/iOS/MovieInput.swift | 10 ++++++---- 
framework/Source/iOS/MovieOutput.swift | 3 +-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 63e3f3d4..b168aea1 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -389,9 +389,9 @@ public class MovieInput: ImageSource { progress?(currentSampleTime.seconds/duration.seconds) - if let movieOutput = synchronizedMovieOutput { + if synchronizedMovieOutput != nil { // For synchrozied transcoding, separate AVAssetReader thread and OpenGL thread to improve performance - movieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in self?.processNextVideoSampleOnGLThread(sampleBuffer, currentSampleTime: currentSampleTime) CMSampleBufferInvalidate(sampleBuffer) } @@ -432,7 +432,9 @@ public class MovieInput: ImageSource { } } - process(movieFrame:sampleBuffer) + sharedImageProcessingContext.runOperationSynchronously { + self.process(movieFrame:sampleBuffer) + } } func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { @@ -533,7 +535,7 @@ public class MovieInput: ImageSource { self.conditionLock.lock() // Allow reading if either input is able to accept data, prevent reading if both inputs are unable to accept data. - if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData == true) { + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? 
false) { self.readingShouldWait = false self.conditionLock.signal() } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 24866029..12b3c36b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -520,9 +520,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try NSObject.catchException { while let audioBuffer = self.pendingAudioBuffers.first { - guard assetWriterAudioInput.append(audioBuffer) else { + if (!assetWriterAudioInput.append(audioBuffer)) { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") - break } self.pendingAudioBuffers.removeFirst() if shouldInvalidateSampleWhenDone { From 0b001a54c1a9965f4126c92ffd0051506c71da48 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 24 Sep 2019 03:39:05 +0800 Subject: [PATCH 166/332] improve: support using AVPlayerLayer with MoviePlayer --- framework/Source/iOS/MoviePlayer.swift | 47 +++++++++++++++----------- 1 file changed, 28 insertions(+), 19 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index fc526eee..f8319450 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -77,6 +77,7 @@ public class MoviePlayer: AVPlayer, ImageSource { } var nextSeeking: SeekingInfo? 
public var isSeeking = false + public var disableGPURender = false public override init() { debugPrint("movie player init") @@ -114,12 +115,16 @@ public class MoviePlayer: AVPlayer, ImageSource { self.playerItem = item self.asset = item?.asset if let item = item { - let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] - let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) - videoOutput.suppressesPlayerRendering = true - item.add(videoOutput) - item.audioTimePitchAlgorithm = .varispeed - self.videoOutput = videoOutput + if !disableGPURender { + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + item.add(videoOutput) + item.audioTimePitchAlgorithm = .varispeed + self.videoOutput = videoOutput + } else { + self.videoOutput = nil + } _setupPlayerObservers() } else { self.videoOutput = nil @@ -340,7 +345,7 @@ private extension MoviePlayer { } } - guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: sampleTime, videoOrientation: videoOrientation) else { return } + guard !disableGPURender, let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: sampleTime, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo updateTargetsWithFramebuffer(framebuffer) @@ -351,20 +356,24 @@ private extension MoviePlayer { stop() return } - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in - guard let self = self else { - displayLink.invalidate() - return + if !disableGPURender { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?._displayLinkCallback(displayLink) } - let currentTime = self.currentTime() - if self.videoOutput?.hasNewPixelBuffer(forItemTime: currentTime) 
== true { - guard let pixelBuffer = self.videoOutput?.copyPixelBuffer(forItemTime: currentTime, itemTimeForDisplay: nil) else { - print("Failed to copy pixel buffer at time:\(currentTime)") - return - } - self._notifyTimeObserver(with: currentTime) - self._process(movieFrame: pixelBuffer, with: currentTime) + } else { + _displayLinkCallback(displayLink) + } + } + + private func _displayLinkCallback(_ displayLink: CADisplayLink) { + let playTime = currentTime() + if self.videoOutput?.hasNewPixelBuffer(forItemTime: playTime) == true { + guard let pixelBuffer = videoOutput?.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: nil) else { + print("Failed to copy pixel buffer at time:\(playTime)") + return } + _notifyTimeObserver(with: playTime) + _process(movieFrame: pixelBuffer, with: playTime) } } From 437497d357b310c4df67a3ecbb76ffd6c7cd252e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 26 Sep 2019 01:22:44 +0800 Subject: [PATCH 167/332] feat(camera): support specifying device type --- framework/Source/iOS/Camera.swift | 86 +++++++++++++++++++------------ 1 file changed, 53 insertions(+), 33 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index e1e4e643..f33d2633 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -36,15 +36,25 @@ public enum PhysicalCameraLocation { } } - public func device() -> AVCaptureDevice? { - let devices = AVCaptureDevice.devices(for: .video) - for device in devices { - if (device.position == self.captureDevicePosition()) { - return device + public func device(_ type: AVCaptureDevice.DeviceType) -> AVCaptureDevice? 
{ + if #available(iOS 13.0, *) { + if let matchedDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [type], mediaType: .video, position: captureDevicePosition()).devices.first { + return matchedDevice } + // Or use default wideAngleCamera + return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: captureDevicePosition()).devices.first + } else { + // Fallback on earlier versions + let devices = AVCaptureDevice.devices(for: .video) + for device in devices { + + if (device.position == self.captureDevicePosition()) { + return device + } + } + + return AVCaptureDevice.default(for: .video) } - - return AVCaptureDevice.default(for: .video) } } @@ -57,30 +67,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var location:PhysicalCameraLocation { didSet { if oldValue == location { return } - - guard let device = location.device() else { - fatalError("ERROR: Can't find video devices for \(location)") - } - - do { - let newVideoInput = try AVCaptureDeviceInput(device: device) - captureSession.beginConfiguration() - - captureSession.removeInput(videoInput) - if captureSession.canAddInput(newVideoInput) { - inputCamera = device - captureSession.addInput(newVideoInput) - videoInput = newVideoInput - configureStabilization() - } else { - print("Can't add video input") - captureSession.addInput(videoInput) - } - - captureSession.commitConfiguration() - } catch let error { - fatalError("ERROR: Could not init device: \(error)") - } + configureDeviceInput() } } public var runBenchmark:Bool = false @@ -111,6 +98,12 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioInput:AVCaptureDeviceInput? public var audioOutput:AVCaptureAudioDataOutput? 
public var dontDropFrames: Bool = false + public var deviceType = AVCaptureDevice.DeviceType.builtInWideAngleCamera { + didSet { + guard oldValue.rawValue != deviceType.rawValue else { return } + configureDeviceInput() + } + } public var backCameraStableMode: AVCaptureVideoStabilizationMode = .standard { didSet { if location == .backFacing { @@ -141,7 +134,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var captureSessionRestartAttempts = 0 - public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? = nil) throws { + public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? 
= nil, deviceType: AVCaptureDevice.DeviceType = .builtInWideAngleCamera) throws { debugPrint("camera init") @@ -151,11 +144,12 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.captureSession = AVCaptureSession() self.captureSession.beginConfiguration() captureSession.sessionPreset = sessionPreset + self.deviceType = deviceType if let cameraDevice = cameraDevice { self.inputCamera = cameraDevice } else { - if let device = location.device() { + if let device = location.device(deviceType) { self.inputCamera = device } else { self.videoInput = nil @@ -258,6 +252,32 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) } + func configureDeviceInput() { + guard let device = location.device(deviceType) else { + fatalError("ERROR: Can't find video devices for \(location)") + } + + do { + let newVideoInput = try AVCaptureDeviceInput(device: device) + captureSession.beginConfiguration() + + captureSession.removeInput(videoInput) + if captureSession.canAddInput(newVideoInput) { + inputCamera = device + captureSession.addInput(newVideoInput) + videoInput = newVideoInput + configureStabilization() + } else { + print("Can't add video input") + captureSession.addInput(videoInput) + } + + captureSession.commitConfiguration() + } catch let error { + fatalError("ERROR: Could not init device: \(error)") + } + } + deinit { debugPrint("camera deinit") From 56e308c50acd204900b5d9de8f0a65b7f61aaf32 Mon Sep 17 00:00:00 2001 From: RoCry Date: Fri, 27 Sep 2019 20:08:47 +0800 Subject: [PATCH 168/332] fix(device): find matching device below iOS 13 --- framework/Source/iOS/Camera.swift | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index f33d2633..2ae886e9 100755 --- a/framework/Source/iOS/Camera.swift +++ 
b/framework/Source/iOS/Camera.swift @@ -37,24 +37,20 @@ public enum PhysicalCameraLocation { } public func device(_ type: AVCaptureDevice.DeviceType) -> AVCaptureDevice? { - if #available(iOS 13.0, *) { - if let matchedDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [type], mediaType: .video, position: captureDevicePosition()).devices.first { - return matchedDevice - } - // Or use default wideAngleCamera - return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: captureDevicePosition()).devices.first - } else { - // Fallback on earlier versions - let devices = AVCaptureDevice.devices(for: .video) - for device in devices { - - if (device.position == self.captureDevicePosition()) { - return device - } - } - - return AVCaptureDevice.default(for: .video) + if let matchedDevice = AVCaptureDevice.DiscoverySession( + deviceTypes: [type], + mediaType: .video, + position: captureDevicePosition()).devices.first { + return matchedDevice + } + + // Or use default wideAngleCamera + if let device = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: captureDevicePosition()).devices.first { + return device } + + // or fallback to old logic + return AVCaptureDevice.default(for: .video) } } From 807915bfaaf1842f4f011f5ca25014eb7bf36fb2 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 10 Oct 2019 04:47:46 +0800 Subject: [PATCH 169/332] improve(MovieInput): add some debug log --- framework/Source/iOS/MovieInput.swift | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index b168aea1..02f2e97d 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -160,11 +160,13 @@ public class MovieInput: ImageSource { public func cancel() { self.currentThread?.cancel() self.currentThread = nil + synchronizedEncodingDebugPrint("MovieInput cancel") } public func pause() { 
self.cancel() self.requestedStartTime = self.currentTime + synchronizedEncodingDebugPrint("MovieInput pause") } public func pauseWithoutCancel() { @@ -172,6 +174,7 @@ public class MovieInput: ImageSource { conditionLock.lock() readingShouldWait = true conditionLock.unlock() + synchronizedEncodingDebugPrint("MovieInput pauseWithoutCancel") } public func resume() { @@ -179,6 +182,7 @@ public class MovieInput: ImageSource { readingShouldWait = false conditionLock.signal() conditionLock.unlock() + synchronizedEncodingDebugPrint("MovieInput resume") } // MARK: - From c8e7d6f4f8f7d8395226a292ed60c4b08b7c52d9 Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Thu, 10 Oct 2019 12:48:06 +0800 Subject: [PATCH 170/332] improve(movie-input): cancel associated movie output when canceling movie input. --- framework/Source/iOS/MovieInput.swift | 3 +++ 1 file changed, 3 insertions(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 02f2e97d..623ea61d 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -336,6 +336,9 @@ public class MovieInput: ImageSource { self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") self.delegate?.didFinishMovie() self.completion?(nil) + if thread.isCancelled && self.synchronizedMovieOutput != nil { + self.synchronizedMovieOutput?.cancelRecording() + } } } } From 5580ac2e65e1e77ecc04d8a792ae53168f30e870 Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Thu, 10 Oct 2019 17:33:27 +0800 Subject: [PATCH 171/332] fix(movie-output): fix audio buffer thread issue. 
--- framework/Source/iOS/MovieOutput.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 12b3c36b..787b35ec 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -232,7 +232,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // the session's samples (that is, no samples will be edited out at the end)." self.assetWriter.endSession(atSourceTime: lastFrame) } - self.pendingAudioBuffers.removeAll() + self.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?.pendingAudioBuffers.removeAll() + } if let lastFrame = self.previousFrameTime, let startFrame = self.startFrameTime { self.recordedDuration = lastFrame - startFrame @@ -257,7 +259,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.videoEncodingIsFinished = true self.isRecording = false - self.pendingAudioBuffers.removeAll() + self.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?.pendingAudioBuffers.removeAll() + } self.assetWriter.cancelWriting() completionCallback?() From 7170f52db417bfc9f37f75ff276ea522af08697c Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Mon, 14 Oct 2019 22:48:25 +0800 Subject: [PATCH 172/332] refactor: make public destroy frame buffer and image buffer. --- framework/Source/iOS/PictureInput.swift | 2 +- framework/Source/iOS/RenderView.swift | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 909581fc..01ee1371 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -24,7 +24,7 @@ public enum PictureInputError: Error, CustomStringConvertible { public class PictureInput: ImageSource { public let targets = TargetContainer() - var imageFramebuffer:Framebuffer? + public var imageFramebuffer:Framebuffer? 
public var framebufferUserInfo:[AnyHashable:Any]? var hasProcessedImage:Bool = false diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 2f034d18..3f4e9ee0 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -135,7 +135,7 @@ public class RenderView:UIView, ImageConsumer { return true } - func destroyDisplayFramebuffer() { + public func destroyDisplayFramebuffer() { if let displayFramebuffer = self.displayFramebuffer { var temporaryFramebuffer = displayFramebuffer glDeleteFramebuffers(1, &temporaryFramebuffer) From 4bb078db56a41846d3905f3f44083c8f39b75a6b Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Tue, 15 Oct 2019 15:16:17 +0800 Subject: [PATCH 173/332] chore(frame-buffer): hide frame buffer setter. --- framework/Source/iOS/PictureInput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 01ee1371..099f0972 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -24,7 +24,7 @@ public enum PictureInputError: Error, CustomStringConvertible { public class PictureInput: ImageSource { public let targets = TargetContainer() - public var imageFramebuffer:Framebuffer? + public private(set) var imageFramebuffer:Framebuffer? public var framebufferUserInfo:[AnyHashable:Any]? var hasProcessedImage:Bool = false From 8f4efa896f28e569a66e47e11cb5b929b3493b93 Mon Sep 17 00:00:00 2001 From: Kubrick G Date: Wed, 16 Oct 2019 12:37:50 +0800 Subject: [PATCH 174/332] improve(render-view): destroy frame buffer on bounds and frame size changed. 
--- framework/Source/iOS/RenderView.swift | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 3f4e9ee0..e7043983 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -51,12 +51,14 @@ public class RenderView:UIView, ImageConsumer { override public var bounds: CGRect { didSet { // Check if the size changed - if(oldValue.size != self.bounds.size) { - // Destroy the displayFramebuffer so we render at the correct size for the next frame - sharedImageProcessingContext.runOperationAsynchronously{ - self.destroyDisplayFramebuffer() - } - } + destroyFramebufferOnSizeChanged(oldSize: oldValue.size, newSize: self.bounds.size) + } + } + + override public var frame: CGRect { + didSet { + // Check if the size changed + destroyFramebufferOnSizeChanged(oldSize: oldValue.size, newSize: self.frame.size) } } @@ -135,7 +137,7 @@ public class RenderView:UIView, ImageConsumer { return true } - public func destroyDisplayFramebuffer() { + func destroyDisplayFramebuffer() { if let displayFramebuffer = self.displayFramebuffer { var temporaryFramebuffer = displayFramebuffer glDeleteFramebuffers(1, &temporaryFramebuffer) @@ -148,6 +150,15 @@ public class RenderView:UIView, ImageConsumer { } } + func destroyFramebufferOnSizeChanged(oldSize: CGSize, newSize: CGSize) { + if(oldSize != newSize) { + // Destroy the displayFramebuffer so we render at the correct size for the next frame + sharedImageProcessingContext.runOperationAsynchronously{ + self.destroyDisplayFramebuffer() + } + } + } + func activateDisplayFramebuffer() { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), displayFramebuffer!) 
glViewport(0, 0, backingSize.width, backingSize.height) From 10ffe10a9f25b3b69eac34061e0d52b443c29227 Mon Sep 17 00:00:00 2001 From: Cokile Date: Wed, 16 Oct 2019 12:18:47 +0800 Subject: [PATCH 175/332] chore: resolve some compiler warnings --- framework/Source/Mac/Camera.swift | 4 ++-- framework/Source/Mac/MovieInput.swift | 2 +- framework/Source/SerialDispatch.swift | 19 +++++++++---------- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index accf294a..41a0dd07 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -35,8 +35,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(priority:standardProcessingQueuePriority) - let audioProcessingQueue = DispatchQueue.global(priority:lowProcessingQueuePriority) + let cameraProcessingQueue = standardProcessingQueue + let audioProcessingQueue = lowProcessingQueue var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index ec6cec15..41528f51 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -44,7 +44,7 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { - DispatchQueue.global(priority:standardProcessingQueuePriority).async { + DispatchQueue.global().async { guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 6c3c5ceb..a7276489 100755 --- a/framework/Source/SerialDispatch.swift +++ 
b/framework/Source/SerialDispatch.swift @@ -18,22 +18,21 @@ extension SerialDispatch { #else -public let standardProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { - // DispatchQueue.QoSClass.default +public var standardProcessingQueue:DispatchQueue { if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.GlobalQueuePriority.default + return DispatchQueue.global(qos: .default) } else { - return DispatchQueue.GlobalQueuePriority.default + return DispatchQueue.global(priority: .default) } -}() - -public let lowProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { +} + +public var lowProcessingQueue:DispatchQueue { if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.GlobalQueuePriority.low + return DispatchQueue.global(qos: .background) } else { - return DispatchQueue.GlobalQueuePriority.low + return DispatchQueue.global(priority: .low) } -}() +} func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { if (Thread.isMainThread) { From f1da5e3f5549edf657fc03e32d1195ee23487293 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 3 Nov 2019 16:12:19 +0800 Subject: [PATCH 176/332] improve(MovieOutput): change critical path log to debugLog --- framework/Source/iOS/MovieOutput.swift | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 787b35ec..ceea54a5 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -158,8 +158,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let block = { () -> Void in do { guard self.assetWriter.status != .cancelled else { - completionCallback?(false, MovieOutputError.startWritingError(assetWriterError: nil)) - return + throw MovieOutputError.startWritingError(assetWriterError: nil) } let observation = self.assetWriter.observe(\.error) { [weak self] writer, _ in @@ -195,12 +194,14 @@ public class MovieOutput: ImageConsumer, 
AudioEncodingTarget { self.isRecording = true - self.synchronizedEncodingDebugPrint("MovieOutput started writing") + debugPrint("MovieOutput started writing") completionCallback?(true, nil) } catch { self.assetWriter.cancelWriting() + debugPrint("MovieOutput failed to start writing. error:\(error)") + completionCallback?(false, error) } } @@ -242,8 +243,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriter.finishWriting { completionCallback?() } - self.synchronizedEncodingDebugPrint("MovieOutput finished writing") - self.synchronizedEncodingDebugPrint("MovieOutput total frames appended: \(self.totalFramesAppended)") + debugPrint("MovieOutput finished writing. Total frames appended:\(self.totalFramesAppended)") } } @@ -265,7 +265,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriter.cancelWriting() completionCallback?() - self.synchronizedEncodingDebugPrint("MovieOutput cancel writing") + debugPrint("MovieOutput cancel writing") } } From 4c8de6d84e828cfbdbab92282f677250731ee36e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 8 Nov 2019 23:29:52 +0800 Subject: [PATCH 177/332] improve: MovieOutput log change to print, fix example project compile error --- framework/GPUImage.xcodeproj/project.pbxproj | 8 ++++++++ framework/Source/Framebuffer.swift | 1 + framework/Source/iOS/MovieOutput.swift | 14 +++++++------- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 1a5322d0..75244ce1 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -18,6 +18,8 @@ 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; 
1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */; }; + 264B6AD9237303370090979C /* MoviePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 264B6AD6237303040090979C /* MoviePlayer.swift */; }; + 264B6ADA2373033B0090979C /* FramebufferGenerator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 264B6AD5237303040090979C /* FramebufferGenerator.swift */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -391,6 +393,8 @@ 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TPCircularBuffer.h; path = Source/TPCircularBuffer.h; sourceTree = ""; }; 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TPCircularBuffer.m; path = Source/TPCircularBuffer.m; sourceTree = ""; }; 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/iOS/SpeakerOutput.swift; sourceTree = ""; }; + 264B6AD5237303040090979C /* FramebufferGenerator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = FramebufferGenerator.swift; path = Source/iOS/FramebufferGenerator.swift; sourceTree = ""; }; + 264B6AD6237303040090979C /* MoviePlayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MoviePlayer.swift; 
path = Source/iOS/MoviePlayer.swift; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -1132,6 +1136,8 @@ BC9E350A1E524C8000B8604F /* iOS */ = { isa = PBXGroup; children = ( + 264B6AD5237303040090979C /* FramebufferGenerator.swift */, + 264B6AD6237303040090979C /* MoviePlayer.swift */, BC9E350B1E524CB900B8604F /* Camera.swift */, BC9E35111E524CE400B8604F /* YUVConversionFullRange_GLES.fsh */, BC9E35131E524CE400B8604F /* YUVConversionFullRangeUVPlanar_GLES.fsh */, @@ -1617,6 +1623,7 @@ BC9E35931E52574100B8604F /* UnsharpMask.swift in Sources */, BC9E35AF1E52579900B8604F /* BulgeDistortion.swift in Sources */, BC9E35741E5256DE00B8604F /* LookupFilter.swift in Sources */, + 264B6AD9237303370090979C /* MoviePlayer.swift in Sources */, BC9E35901E52573700B8604F /* Dilation.swift in Sources */, BC9E35B71E5257B300B8604F /* ThresholdSketch.swift in Sources */, BC9E35711E5256D500B8604F /* LevelsAdjustment.swift in Sources */, @@ -1719,6 +1726,7 @@ BC9E35B41E5257A900B8604F /* ToonFilter.swift in Sources */, BC9E354F1E52508A00B8604F /* RawDataInput.swift in Sources */, BC9E35681E5256BD00B8604F /* GammaAdjustment.swift in Sources */, + 264B6ADA2373033B0090979C /* FramebufferGenerator.swift in Sources */, BC9E35A81E52578400B8604F /* Vignette.swift in Sources */, BC9E355A1E5252C400B8604F /* TextureOutput.swift in Sources */, 
BC9E35841E52571300B8604F /* ColorLocalBinaryPattern.swift in Sources */, diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 8b6bbbae..e7687ae7 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -254,6 +254,7 @@ public extension Size { } #if DEBUG +import UIKit public extension Framebuffer { func debugUIImage() -> UIImage? { let bufferSize = Int(size.width * size.height * 4) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index ceea54a5..fc67813c 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -83,7 +83,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { deinit { observations.forEach { $0.invalidate() } - debugPrint("movie output deinit \(assetWriter.outputURL)") + print("movie output deinit \(assetWriter.outputURL)") } var shouldWaitForEncoding: Bool { return !encodingLiveVideo || waitUtilDataIsReadyForLiveVideo @@ -92,7 +92,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? 
= nil, keepLastPixelBuffer: Bool = false) throws { - debugPrint("movie output init \(URL)") + print("movie output init \(URL)") imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup let movieProcessingContext = OpenGLContext() @@ -170,7 +170,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if let preferredTransform = self.preferredTransform { self.assetWriterVideoInput.transform = preferredTransform } - + print("MovieOutput starting writing...") var success = false try NSObject.catchException { success = self.assetWriter.startWriting() @@ -194,13 +194,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.isRecording = true - debugPrint("MovieOutput started writing") + print("MovieOutput started writing") completionCallback?(true, nil) } catch { self.assetWriter.cancelWriting() - debugPrint("MovieOutput failed to start writing. error:\(error)") + print("MovieOutput failed to start writing. error:\(error)") completionCallback?(false, error) } @@ -243,7 +243,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriter.finishWriting { completionCallback?() } - debugPrint("MovieOutput finished writing. Total frames appended:\(self.totalFramesAppended)") + print("MovieOutput finished writing. 
Total frames appended:\(self.totalFramesAppended)") } } @@ -265,7 +265,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriter.cancelWriting() completionCallback?() - debugPrint("MovieOutput cancel writing") + print("MovieOutput cancel writing") } } From e6347bd57e439bfdef0496456ce2f19c313ad2e9 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 19 Nov 2019 22:54:51 +0800 Subject: [PATCH 178/332] fix(movieplayer): fix possible crash when stop MoviePlayer --- framework/Source/iOS/MoviePlayer.swift | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index f8319450..2360f7a3 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -96,7 +96,8 @@ public class MoviePlayer: AVPlayer, ImageSource { deinit { debugPrint("movie player deinit \(String(describing: asset))") - stop() + pause() + displayLink?.invalidate() _removePlayerObservers() } @@ -179,7 +180,9 @@ public class MoviePlayer: AVPlayer, ImageSource { public func stop() { pause() debugPrint("movie player stop \(String(describing: asset))") - timeObserversQueue.removeAll() + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?.timeObserversQueue.removeAll() + } displayLink?.invalidate() displayLink = nil isSeeking = false From d4585cd344200abb6fb324ebeb83a4139e29e388 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 27 Nov 2019 22:47:11 +0800 Subject: [PATCH 179/332] fix(movieplayer): fix seeking timing and add log for invalid playeritem --- framework/Source/iOS/MoviePlayer.swift | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 2360f7a3..78436609 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -199,7 +199,11 @@ public class MoviePlayer: AVPlayer, 
ImageSource { } else { nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } - actuallySeekToTime() + if assetDuration <= 0 { + print("cannot seek since assetDuration is 0. currentItem:\(String(describing: currentItem))") + } else { + actuallySeekToTime() + } } func actuallySeekToTime() { @@ -326,7 +330,11 @@ private extension MoviePlayer { func resumeIfNeeded() { guard isReadyToPlay && isPlaying == true && rate != playrate else { return } - rate = playrate + if nextSeeking != nil { + actuallySeekToTime() + } else { + rate = playrate + } } // MARK: - From a4a3be078ce170e50e5fc2464e78989da8225fad Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 28 Nov 2019 12:56:47 +0800 Subject: [PATCH 180/332] feat: change to queueplayer, and update loop logic --- framework/Source/iOS/MoviePlayer.swift | 154 ++++++++++++++++++++----- 1 file changed, 124 insertions(+), 30 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 78436609..8da6973c 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -24,19 +24,21 @@ public struct MoviePlayerTimeObserver { } } -public class MoviePlayer: AVPlayer, ImageSource { +private var looperDict = [MoviePlayer: AVPlayerLooper]() + +public class MoviePlayer: AVQueuePlayer, ImageSource { public let targets = TargetContainer() public var runBenchmark = false public var logEnabled = false public weak var delegate: MoviePlayerDelegate? public var startTime: TimeInterval? public var endTime: TimeInterval? + /// Whether to loop play. public var loop = false - public private(set) var asset: AVAsset? + public var asset: AVAsset? { return currentItem?.asset } public private(set) var isPlaying = false + public var lastPlayerItem: AVPlayerItem? - private(set) var playerItem: AVPlayerItem? - var videoOutput: AVPlayerItemVideoOutput? var displayLink: CADisplayLink? 
lazy var framebufferGenerator = FramebufferGenerator() @@ -58,6 +60,9 @@ public class MoviePlayer: AVPlayer, ImageSource { guard let asset = asset else { return .portrait } return asset.imageOrientation ?? .portrait } + public var didPlayToEnd: Bool { + return currentTime().seconds >= assetDuration + } var framebufferUserInfo: [AnyHashable:Any]? var observations = [NSKeyValueObservation]() @@ -80,14 +85,13 @@ public class MoviePlayer: AVPlayer, ImageSource { public var disableGPURender = false public override init() { - debugPrint("movie player init") + print("movie player init") // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) super.init() } override public init(playerItem item: AVPlayerItem?) { - self.playerItem = item // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) super.init(playerItem: item) @@ -95,7 +99,7 @@ public class MoviePlayer: AVPlayer, ImageSource { } deinit { - debugPrint("movie player deinit \(String(describing: asset))") + print("movie player deinit \(String(describing: asset))") pause() displayLink?.invalidate() _removePlayerObservers() @@ -108,13 +112,33 @@ public class MoviePlayer: AVPlayer, ImageSource { replaceCurrentItem(with: playerItem) } + override public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?) 
{ + insert(item, after: afterItem, disableGPURender: disableGPURender) + } + + public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?, disableGPURender: Bool) { + if !disableGPURender { + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + item.add(videoOutput) + item.audioTimePitchAlgorithm = .varispeed + } + lastPlayerItem = item + self.disableGPURender = disableGPURender + _setupPlayerObservers() + super.insert(item, after: afterItem) + } + override public func replaceCurrentItem(with item: AVPlayerItem?) { + replaceCurrentItem(with: item, disableGPURender: disableGPURender) + } + + public func replaceCurrentItem(with item: AVPlayerItem?, disableGPURender: Bool) { if isPlaying { stop() } - self.videoOutput.map { self.playerItem?.remove($0) } - self.playerItem = item - self.asset = item?.asset + lastPlayerItem = item if let item = item { if !disableGPURender { let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] @@ -122,24 +146,47 @@ public class MoviePlayer: AVPlayer, ImageSource { videoOutput.suppressesPlayerRendering = true item.add(videoOutput) item.audioTimePitchAlgorithm = .varispeed - self.videoOutput = videoOutput - } else { - self.videoOutput = nil } _setupPlayerObservers() } else { - self.videoOutput = nil _removePlayerObservers() } - + self.disableGPURender = disableGPURender super.replaceCurrentItem(with: item) } + public func replayLastItem() { + guard let playerItem = lastPlayerItem else { return } + remove(playerItem) + insert(playerItem, after: nil) + let start = startTime ?? 
0 + if playerItem.currentTime().seconds != start { + seekToTime(start, shouldPlayAfterSeeking: true) + } else { + play() + } + } + + override public func remove(_ item: AVPlayerItem) { + super.remove(item) + print("remove item:\(item)") + } + + override public func removeAllItems() { + super.removeAllItems() + print("remove all items") + } + + override public func advanceToNextItem() { + super.advanceToNextItem() + print("advance to next item") + } + // MARK: - // MARK: Playback control override public func play() { - if displayLink == nil { + if displayLink == nil || didPlayToEnd { start() } else { resume() @@ -152,34 +199,60 @@ public class MoviePlayer: AVPlayer, ImageSource { } public func start() { - guard playerItem != nil else { - assert(playerItem != nil) - debugPrint("ERROR! player hasn't been setup before starting") + if actionAtItemEnd == .advance { + if let currentItem = currentItem { + if didPlayToEnd { + remove(currentItem) + insert(currentItem, after: nil) + } + } else if let playerItem = lastPlayerItem { + insert(playerItem, after: nil) + } + } + + guard currentItem != nil else { + assert(currentItem != nil) + print("ERROR! player hasn't been setup before starting") return } isPlaying = true - debugPrint("movie player start \(String(describing: asset))") + print("movie player start duration:\(String(describing: asset?.duration.seconds)) \(String(describing: asset))") _setupDisplayLinkIfNeeded() _resetTimeObservers() - seekToTime(startTime ?? 0, shouldPlayAfterSeeking: true) + if loop { + if let playerItem = lastPlayerItem { + looperDict[self]?.disableLooping() + let start = CMTime(seconds: startTime ?? 0, preferredTimescale: 600) + let end = CMTime(seconds: endTime ?? 
assetDuration, preferredTimescale: 600) + let looper = AVPlayerLooper(player: self, templateItem: playerItem, timeRange: CMTimeRange(start: start, end: end)) + looperDict[self] = looper + } + rate = playrate + } else { + if let startTime = startTime { + seekToTime(startTime, shouldPlayAfterSeeking: true) + } else { + rate = playrate + } + } } public func resume() { isPlaying = true rate = playrate - debugPrint("movie player resume \(String(describing: asset))") + print("movie player resume \(String(describing: asset))") } override public func pause() { isPlaying = false guard rate != 0 else { return } - debugPrint("movie player pause \(String(describing: asset))") + print("movie player pause \(String(describing: asset))") super.pause() } public func stop() { pause() - debugPrint("movie player stop \(String(describing: asset))") + print("movie player stop \(String(describing: asset))") sharedImageProcessingContext.runOperationAsynchronously { [weak self] in self?.timeObserversQueue.removeAll() } @@ -187,6 +260,8 @@ public class MoviePlayer: AVPlayer, ImageSource { displayLink = nil isSeeking = false nextSeeking = nil + looperDict[self]?.disableLooping() + looperDict[self] = nil } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { @@ -239,6 +314,17 @@ public class MoviePlayer: AVPlayer, ImageSource { totalTimeObservers = totalTimeObservers.sorted { (lhs, rhs) in return lhs.targetTime > rhs.targetTime } + if isPlaying { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + guard let self = self else { return } + if let lastIndex = self.timeObserversQueue.firstIndex(where: { $0.targetTime >= seconds }) { + self.timeObserversQueue.insert(timeObserver, at: lastIndex) + } else { + self.timeObserversQueue.append(timeObserver) + } + } + } + return timeObserver } @@ -277,7 +363,7 @@ private extension MoviePlayer { observations.append(observe(\.rate) { [weak self] _, _ in self?.playerRateDidChange() }) - if let playerItem = 
playerItem { + if let playerItem = currentItem { observations.append(playerItem.observe(\AVPlayerItem.status) { [weak self] _, _ in self?.playerItemStatusDidChange() }) @@ -299,7 +385,7 @@ private extension MoviePlayer { } timeObserversQueue.append(observer) } - if let endTime = endTime { + if !loop, let endTime = endTime { let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in if self?.loop == true && self?.isPlaying == true { self?.pause() @@ -376,16 +462,24 @@ private extension MoviePlayer { } } - private func _displayLinkCallback(_ displayLink: CADisplayLink) { + var videoOutput: AVPlayerItemVideoOutput? { + return currentItem?.outputs.first(where: { $0 is AVPlayerItemVideoOutput }) as? AVPlayerItemVideoOutput + } + + func _displayLinkCallback(_ displayLink: CADisplayLink) { let playTime = currentTime() - if self.videoOutput?.hasNewPixelBuffer(forItemTime: playTime) == true { - guard let pixelBuffer = videoOutput?.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: nil) else { +// debugPrint("playtime:\(playTime.seconds)") + guard playTime.seconds > 0 else { return } + if let videoOutput = videoOutput, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true { + guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: nil) else { print("Failed to copy pixel buffer at time:\(playTime)") return } - _notifyTimeObserver(with: playTime) _process(movieFrame: pixelBuffer, with: playTime) } + if playTime.seconds > 0 { + _notifyTimeObserver(with: playTime) + } } @objc func playerDidPlayToEnd(notification: Notification) { From 58b25065219603ef1bf866fdcbf8488bcb1acd1f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 1 Dec 2019 00:43:19 +0800 Subject: [PATCH 181/332] fix(player): fix playerItem observer is not correct --- framework/Source/iOS/MoviePlayer.swift | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift 
b/framework/Source/iOS/MoviePlayer.swift index 8da6973c..fa29b6e1 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -126,7 +126,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } lastPlayerItem = item self.disableGPURender = disableGPURender - _setupPlayerObservers() + _setupPlayerObservers(playerItem: item) super.insert(item, after: afterItem) } @@ -147,7 +147,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { item.add(videoOutput) item.audioTimePitchAlgorithm = .varispeed } - _setupPlayerObservers() + _setupPlayerObservers(playerItem: item) } else { _removePlayerObservers() } @@ -353,7 +353,7 @@ private extension MoviePlayer { } } - func _setupPlayerObservers() { + func _setupPlayerObservers(playerItem: AVPlayerItem?) { _removePlayerObservers() NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) @@ -363,9 +363,9 @@ private extension MoviePlayer { observations.append(observe(\.rate) { [weak self] _, _ in self?.playerRateDidChange() }) - if let playerItem = currentItem { - observations.append(playerItem.observe(\AVPlayerItem.status) { [weak self] _, _ in - self?.playerItemStatusDidChange() + if let item = playerItem { + observations.append(item.observe(\AVPlayerItem.status) { [weak self] _, _ in + self?.playerItemStatusDidChange(item) }) } } @@ -409,9 +409,11 @@ private extension MoviePlayer { resumeIfNeeded() } - func playerItemStatusDidChange() { - debugPrint("PlayerItem status change to:\(String(describing: currentItem?.status.rawValue)) asset:\(String(describing: asset))") - resumeIfNeeded() + func playerItemStatusDidChange(_ playerItem: AVPlayerItem) { + debugPrint("PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset)") + if playerItem == currentItem 
{ + resumeIfNeeded() + } } func resumeIfNeeded() { From 5a329731d3c7792143648b0b6fc82c5f3e9f76ac Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 3 Dec 2019 19:02:37 +0800 Subject: [PATCH 182/332] improve(player): extract same code and add more log --- framework/Source/iOS/MoviePlayer.swift | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index fa29b6e1..961d3d0d 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -118,15 +118,13 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?, disableGPURender: Bool) { if !disableGPURender { - let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] - let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) - videoOutput.suppressesPlayerRendering = true - item.add(videoOutput) - item.audioTimePitchAlgorithm = .varispeed + _setupPlayerItemVideoOutput(for: item) } + item.audioTimePitchAlgorithm = .varispeed lastPlayerItem = item self.disableGPURender = disableGPURender _setupPlayerObservers(playerItem: item) + print("insert new item:\(item) afterItem:\(String(describing: afterItem)) disableGPURender:\(disableGPURender)") super.insert(item, after: afterItem) } @@ -141,17 +139,15 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { lastPlayerItem = item if let item = item { if !disableGPURender { - let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] - let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) - videoOutput.suppressesPlayerRendering = true - item.add(videoOutput) - item.audioTimePitchAlgorithm = .varispeed + _setupPlayerItemVideoOutput(for: item) } + item.audioTimePitchAlgorithm = .varispeed 
_setupPlayerObservers(playerItem: item) } else { _removePlayerObservers() } self.disableGPURender = disableGPURender + print("replace current item with:\(String(describing: item)) disableGPURender:\(disableGPURender)") super.replaceCurrentItem(with: item) } @@ -165,6 +161,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { play() } + print("replay last item:\(playerItem)") } override public func remove(_ item: AVPlayerItem) { @@ -353,6 +350,13 @@ private extension MoviePlayer { } } + func _setupPlayerItemVideoOutput(for item: AVPlayerItem) { + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) + videoOutput.suppressesPlayerRendering = true + item.add(videoOutput) + } + func _setupPlayerObservers(playerItem: AVPlayerItem?) { _removePlayerObservers() NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) From e0ea45280906e9bb5521329d5c5b015d5ba984f3 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 4 Dec 2019 18:33:32 +0800 Subject: [PATCH 183/332] fix(player): replace doesn't seek to the beginning, update log and assert --- framework/Source/iOS/MoviePlayer.swift | 30 ++++++++++---------------- 1 file changed, 11 insertions(+), 19 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 961d3d0d..845d094b 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -38,6 +38,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var asset: AVAsset? { return currentItem?.asset } public private(set) var isPlaying = false public var lastPlayerItem: AVPlayerItem? + public var playableItem: AVPlayerItem? { currentItem ?? lastPlayerItem } var displayLink: CADisplayLink? 
@@ -124,8 +125,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { lastPlayerItem = item self.disableGPURender = disableGPURender _setupPlayerObservers(playerItem: item) - print("insert new item:\(item) afterItem:\(String(describing: afterItem)) disableGPURender:\(disableGPURender)") super.insert(item, after: afterItem) + print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) disableGPURender:\(disableGPURender) itemsCount:\(items().count)") } override public func replaceCurrentItem(with item: AVPlayerItem?) { @@ -147,7 +148,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _removePlayerObservers() } self.disableGPURender = disableGPURender - print("replace current item with:\(String(describing: item)) disableGPURender:\(disableGPURender)") + print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) disableGPURender:\(disableGPURender) itemsCount:\(items().count)") super.replaceCurrentItem(with: item) } @@ -196,20 +197,13 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func start() { - if actionAtItemEnd == .advance { - if let currentItem = currentItem { - if didPlayToEnd { - remove(currentItem) - insert(currentItem, after: nil) - } - } else if let playerItem = lastPlayerItem { - insert(playerItem, after: nil) - } + if actionAtItemEnd == .advance, currentItem == nil, let playerItem = lastPlayerItem { + insert(playerItem, after: nil) } guard currentItem != nil else { - assert(currentItem != nil) - print("ERROR! player hasn't been setup before starting") + // Sometime the player.items() seems still 0 even if insert was called, but it won't result in crash, just print a error log for information. + print("ERROR! 
player currentItem is nil") return } isPlaying = true @@ -224,13 +218,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { let looper = AVPlayerLooper(player: self, templateItem: playerItem, timeRange: CMTimeRange(start: start, end: end)) looperDict[self] = looper } - rate = playrate + } + if currentTime().seconds != (startTime ?? 0) { + seekToTime(startTime ?? 0, shouldPlayAfterSeeking: true) } else { - if let startTime = startTime { - seekToTime(startTime, shouldPlayAfterSeeking: true) - } else { - rate = playrate - } + rate = playrate } } From f67cec235577d8dac4b02f27ca513cf8d4e8325b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 5 Dec 2019 13:59:35 +0800 Subject: [PATCH 184/332] fix(player): move all time observer operations into main thread --- framework/Source/iOS/MoviePlayer.swift | 122 ++++++++++++++----------- 1 file changed, 67 insertions(+), 55 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 845d094b..8b3b812c 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -242,7 +242,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func stop() { pause() print("movie player stop \(String(describing: asset))") - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + _timeObserversUpdate { [weak self] in self?.timeObserversQueue.removeAll() } displayLink?.invalidate() @@ -299,13 +299,13 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func addTimeObserver(seconds: TimeInterval, callback: @escaping MoviePlayerTimeObserverCallback) -> MoviePlayerTimeObserver { let timeObserver = MoviePlayerTimeObserver(targetTime: seconds, callback: callback) - totalTimeObservers.append(timeObserver) - totalTimeObservers = totalTimeObservers.sorted { (lhs, rhs) in - return lhs.targetTime > rhs.targetTime - } - if isPlaying { - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in - guard let 
self = self else { return } + _timeObserversUpdate { [weak self] in + guard let self = self else { return } + self.totalTimeObservers.append(timeObserver) + self.totalTimeObservers = self.totalTimeObservers.sorted { (lhs, rhs) in + return lhs.targetTime > rhs.targetTime + } + if self.isPlaying { if let lastIndex = self.timeObserversQueue.firstIndex(where: { $0.targetTime >= seconds }) { self.timeObserversQueue.insert(timeObserver, at: lastIndex) } else { @@ -313,21 +313,18 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } } } - return timeObserver } public func removeTimeObserver(timeObserver: MoviePlayerTimeObserver) { - totalTimeObservers.removeAll { (observer) -> Bool in - return observer.observerID == timeObserver.observerID - } - timeObserversQueue.removeAll { (observer) -> Bool in - return observer.observerID == timeObserver.observerID + _timeObserversUpdate { [weak self] in + self?.totalTimeObservers.removeAll { $0.observerID == timeObserver.observerID } + self?.timeObserversQueue.removeAll { $0.observerID == timeObserver.observerID } } } public func removeAllTimeObservers() { - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + _timeObserversUpdate { [weak self] in self?.timeObserversQueue.removeAll() self?.totalTimeObservers.removeAll() } @@ -373,25 +370,39 @@ private extension MoviePlayer { observations.removeAll() } - func _resetTimeObservers() { - timeObserversQueue.removeAll() - for observer in totalTimeObservers { - guard observer.targetTime >= (startTime ?? 0) && observer.targetTime <= endTime ?? 
assetDuration else { - continue + /// NOTE: all time observer operations will be executed in main queue + func _timeObserversUpdate(_ block: @escaping () -> Void) { + if Thread.isMainThread { + block() + } else { + DispatchQueue.main.async { + block() } - timeObserversQueue.append(observer) } - if !loop, let endTime = endTime { - let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in - if self?.loop == true && self?.isPlaying == true { - self?.pause() - self?.start() - } else { - self?.pause() + } + + func _resetTimeObservers() { + _timeObserversUpdate { [weak self] in + guard let self = self else { return } + self.timeObserversQueue.removeAll() + for observer in self.totalTimeObservers { + guard observer.targetTime >= (self.startTime ?? 0) && observer.targetTime <= self.endTime ?? self.assetDuration else { + continue } + self.timeObserversQueue.append(observer) + } + if !self.loop, let endTime = self.endTime { + let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in + if self?.loop == true && self?.isPlaying == true { + self?.pause() + self?.start() + } else { + self?.pause() + } + } + let insertIndex: Int = self.timeObserversQueue.reversed().firstIndex { endTime < $0.targetTime } ?? 0 + self.timeObserversQueue.insert(endTimeObserver, at: insertIndex) } - let insertIndex: Int = timeObserversQueue.reversed().firstIndex { endTime < $0.targetTime } ?? 
0 - timeObserversQueue.insert(endTimeObserver, at: insertIndex) } } @@ -424,8 +435,13 @@ private extension MoviePlayer { // MARK: - // MARK: Internal processing functions - func _process(movieFrame: CVPixelBuffer, with sampleTime: CMTime) { - delegate?.moviePlayerDidReadPixelBuffer(movieFrame, time: CMTimeGetSeconds(sampleTime)) + func _process(videoOutput: AVPlayerItemVideoOutput, at playTime: CMTime) { + guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: nil) else { + print("Failed to copy pixel buffer at time:\(playTime)") + return + } + + delegate?.moviePlayerDidReadPixelBuffer(pixelBuffer, time: CMTimeGetSeconds(playTime)) let startTime = CFAbsoluteTimeGetCurrent() if runBenchmark || logEnabled { @@ -440,7 +456,7 @@ private extension MoviePlayer { } } - guard !disableGPURender, let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: sampleTime, videoOrientation: videoOrientation) else { return } + guard !disableGPURender, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: playTime, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo updateTargetsWithFramebuffer(framebuffer) @@ -451,49 +467,45 @@ private extension MoviePlayer { stop() return } - if !disableGPURender { - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in - self?._displayLinkCallback(displayLink) - } - } else { - _displayLinkCallback(displayLink) - } - } - - var videoOutput: AVPlayerItemVideoOutput? { - return currentItem?.outputs.first(where: { $0 is AVPlayerItemVideoOutput }) as? 
AVPlayerItemVideoOutput - } - - func _displayLinkCallback(_ displayLink: CADisplayLink) { + let playTime = currentTime() // debugPrint("playtime:\(playTime.seconds)") guard playTime.seconds > 0 else { return } if let videoOutput = videoOutput, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true { - guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: nil) else { - print("Failed to copy pixel buffer at time:\(playTime)") - return + if !disableGPURender { + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + self?._process(videoOutput: videoOutput, at: playTime) + } + } else { + _process(videoOutput: videoOutput, at: playTime) } - _process(movieFrame: pixelBuffer, with: playTime) } if playTime.seconds > 0 { _notifyTimeObserver(with: playTime) } } + var videoOutput: AVPlayerItemVideoOutput? { + return currentItem?.outputs.first(where: { $0 is AVPlayerItemVideoOutput }) as? AVPlayerItemVideoOutput + } + @objc func playerDidPlayToEnd(notification: Notification) { + print("player did play to end. notification:\(notification)") + guard (notification.object as? AVPlayerItem) == currentItem else { return } guard loop && isPlaying && (endTime == nil || currentTime().seconds == assetDuration) else { return } start() } @objc func playerStalled(notification: Notification) { print("player was stalled. notification:\(notification)") + guard (notification.object as? 
AVPlayerItem) == currentItem else { return } } func _notifyTimeObserver(with sampleTime: CMTime) { - let currentTime = CMTimeGetSeconds(sampleTime) - while let lastObserver = timeObserversQueue.last, lastObserver.targetTime <= currentTime { - timeObserversQueue.removeLast() - DispatchQueue.main.async { + let currentTime = sampleTime.seconds + _timeObserversUpdate { [weak self] in + while let lastObserver = self?.timeObserversQueue.last, lastObserver.targetTime <= currentTime { + self?.timeObserversQueue.removeLast() lastObserver.callback(currentTime) } } From 0f1a9396e00d87aa623d27da92cf5e3ca5d57351 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 5 Dec 2019 19:32:38 +0800 Subject: [PATCH 185/332] fix(player): improve looping logic --- framework/Source/iOS/MoviePlayer.swift | 58 ++++++++++++++------------ 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 8b3b812c..faca96fb 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -24,18 +24,19 @@ public struct MoviePlayerTimeObserver { } } -private var looperDict = [MoviePlayer: AVPlayerLooper]() - public class MoviePlayer: AVQueuePlayer, ImageSource { + static var looperDict = [MoviePlayer: AVPlayerLooper]() public let targets = TargetContainer() public var runBenchmark = false public var logEnabled = false public weak var delegate: MoviePlayerDelegate? public var startTime: TimeInterval? + public var actualStartTime: TimeInterval { startTime ?? 0 } public var endTime: TimeInterval? + public var actualEndTime: TimeInterval { endTime ?? (assetDuration - actualStartTime) } /// Whether to loop play. public var loop = false - public var asset: AVAsset? { return currentItem?.asset } + public var asset: AVAsset? { return playableItem?.asset } public private(set) var isPlaying = false public var lastPlayerItem: AVPlayerItem? public var playableItem: AVPlayerItem? { currentItem ?? 
lastPlayerItem } @@ -148,17 +149,15 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _removePlayerObservers() } self.disableGPURender = disableGPURender - print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) disableGPURender:\(disableGPURender) itemsCount:\(items().count)") super.replaceCurrentItem(with: item) + print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) disableGPURender:\(disableGPURender) itemsCount:\(items().count)") } public func replayLastItem() { guard let playerItem = lastPlayerItem else { return } - remove(playerItem) - insert(playerItem, after: nil) - let start = startTime ?? 0 - if playerItem.currentTime().seconds != start { - seekToTime(start, shouldPlayAfterSeeking: true) + replaceCurrentItem(with: playerItem) + if playerItem.currentTime().seconds != actualStartTime { + seekToTime(actualStartTime, shouldPlayAfterSeeking: true) } else { play() } @@ -171,6 +170,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } override public func removeAllItems() { + _stopLoopingIfNeeded(playerItem: currentItem) super.removeAllItems() print("remove all items") } @@ -212,17 +212,19 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _resetTimeObservers() if loop { if let playerItem = lastPlayerItem { - looperDict[self]?.disableLooping() - let start = CMTime(seconds: startTime ?? 0, preferredTimescale: 600) - let end = CMTime(seconds: endTime ?? assetDuration, preferredTimescale: 600) + MoviePlayer.looperDict[self]?.disableLooping() + let start = CMTime(seconds: actualStartTime, preferredTimescale: 600) + let end = CMTime(seconds: endTime ?? playerItem.asset.duration.seconds, preferredTimescale: 600) let looper = AVPlayerLooper(player: self, templateItem: playerItem, timeRange: CMTimeRange(start: start, end: end)) - looperDict[self] = looper + MoviePlayer.looperDict[self] = looper } - } - if currentTime().seconds != (startTime ?? 
0) { - seekToTime(startTime ?? 0, shouldPlayAfterSeeking: true) - } else { rate = playrate + } else { + if currentTime().seconds != actualStartTime { + seekToTime(actualStartTime, shouldPlayAfterSeeking: true) + } else { + rate = playrate + } } } @@ -249,8 +251,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { displayLink = nil isSeeking = false nextSeeking = nil - looperDict[self]?.disableLooping() - looperDict[self] = nil + MoviePlayer.looperDict[self]?.disableLooping() + MoviePlayer.looperDict[self] = nil } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { @@ -339,6 +341,14 @@ private extension MoviePlayer { } } + func _stopLoopingIfNeeded(playerItem: AVPlayerItem?) { + if loop, playerItem == currentItem, let looper = MoviePlayer.looperDict[self] { + looper.disableLooping() + MoviePlayer.looperDict[self] = nil + print("stop looping item:\(String(describing: playerItem))") + } + } + func _setupPlayerItemVideoOutput(for item: AVPlayerItem) { let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) @@ -386,7 +396,7 @@ private extension MoviePlayer { guard let self = self else { return } self.timeObserversQueue.removeAll() for observer in self.totalTimeObservers { - guard observer.targetTime >= (self.startTime ?? 0) && observer.targetTime <= self.endTime ?? 
self.assetDuration else { + guard observer.targetTime >= self.actualStartTime && observer.targetTime <= self.actualEndTime else { continue } self.timeObserversQueue.append(observer) @@ -463,11 +473,7 @@ private extension MoviePlayer { } @objc func displayLinkCallback(displayLink: CADisplayLink) { - guard currentItem != nil else { - stop() - return - } - + guard currentItem?.status == .readyToPlay else { return } let playTime = currentTime() // debugPrint("playtime:\(playTime.seconds)") guard playTime.seconds > 0 else { return } @@ -492,8 +498,6 @@ private extension MoviePlayer { @objc func playerDidPlayToEnd(notification: Notification) { print("player did play to end. notification:\(notification)") guard (notification.object as? AVPlayerItem) == currentItem else { return } - guard loop && isPlaying && (endTime == nil || currentTime().seconds == assetDuration) else { return } - start() } @objc func playerStalled(notification: Notification) { From 754e9f634ed1b4dd2b3a14d04ff1c235f35e9823 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 9 Dec 2019 01:32:05 +0800 Subject: [PATCH 186/332] improve(player): support video output for normal video --- framework/Source/Pipeline.swift | 2 +- framework/Source/iOS/MoviePlayer.swift | 60 +++++++++++++------------- 2 files changed, 30 insertions(+), 32 deletions(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index a9265970..9dd48bd0 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -111,7 +111,7 @@ class WeakImageConsumer { public class TargetContainer:Sequence { private var targets = [WeakImageConsumer]() - private var count:Int { get { return targets.count } } + var count:Int { get { return targets.count } } #if !os(Linux) let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index faca96fb..b98bfe50 100644 --- 
a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -65,6 +65,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var didPlayToEnd: Bool { return currentTime().seconds >= assetDuration } + public var hasTarget: Bool { targets.count > 0 } var framebufferUserInfo: [AnyHashable:Any]? var observations = [NSKeyValueObservation]() @@ -84,7 +85,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } var nextSeeking: SeekingInfo? public var isSeeking = false - public var disableGPURender = false + public var enableVideoOutput = false + private var isProcessing = false public override init() { print("movie player init") @@ -93,13 +95,6 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { super.init() } - override public init(playerItem item: AVPlayerItem?) { - // Make sure player it intialized on the main thread, or it might cause KVO crash - assert(Thread.isMainThread) - super.init(playerItem: item) - replaceCurrentItem(with: item) - } - deinit { print("movie player deinit \(String(describing: asset))") pause() @@ -109,38 +104,42 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { // MARK: Data Source public func replaceCurrentItem(with url: URL) { + replaceCurrentItem(with: url, enableVideoOutput: enableVideoOutput) + } + + public func replaceCurrentItem(with url: URL, enableVideoOutput: Bool) { let inputAsset = AVURLAsset(url: url) let playerItem = AVPlayerItem(asset: inputAsset, automaticallyLoadedAssetKeys: [AVURLAssetPreferPreciseDurationAndTimingKey]) - replaceCurrentItem(with: playerItem) + replaceCurrentItem(with: playerItem, enableVideoOutput: enableVideoOutput) } override public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?) 
{ - insert(item, after: afterItem, disableGPURender: disableGPURender) + insert(item, after: afterItem, enableVideoOutput: enableVideoOutput) } - public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?, disableGPURender: Bool) { - if !disableGPURender { + public func insert(_ item: AVPlayerItem, after afterItem: AVPlayerItem?, enableVideoOutput: Bool) { + if enableVideoOutput { _setupPlayerItemVideoOutput(for: item) } item.audioTimePitchAlgorithm = .varispeed lastPlayerItem = item - self.disableGPURender = disableGPURender + self.enableVideoOutput = enableVideoOutput _setupPlayerObservers(playerItem: item) super.insert(item, after: afterItem) - print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) disableGPURender:\(disableGPURender) itemsCount:\(items().count)") + print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) itemsCount:\(items().count)") } override public func replaceCurrentItem(with item: AVPlayerItem?) { - replaceCurrentItem(with: item, disableGPURender: disableGPURender) + replaceCurrentItem(with: item, enableVideoOutput: enableVideoOutput) } - public func replaceCurrentItem(with item: AVPlayerItem?, disableGPURender: Bool) { + public func replaceCurrentItem(with item: AVPlayerItem?, enableVideoOutput: Bool) { if isPlaying { stop() } lastPlayerItem = item if let item = item { - if !disableGPURender { + if enableVideoOutput { _setupPlayerItemVideoOutput(for: item) } item.audioTimePitchAlgorithm = .varispeed @@ -148,9 +147,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { _removePlayerObservers() } - self.disableGPURender = disableGPURender + self.enableVideoOutput = enableVideoOutput super.replaceCurrentItem(with: item) - print("replace current item with newItem(\(item?.duration.seconds ?? 
0)s)):\(String(describing: item)) disableGPURender:\(disableGPURender) itemsCount:\(items().count)") + print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) itemsCount:\(items().count)") } public func replayLastItem() { @@ -466,7 +465,7 @@ private extension MoviePlayer { } } - guard !disableGPURender, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: playTime, videoOrientation: videoOrientation) else { return } + guard hasTarget, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: playTime, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo updateTargetsWithFramebuffer(framebuffer) @@ -475,19 +474,18 @@ private extension MoviePlayer { @objc func displayLinkCallback(displayLink: CADisplayLink) { guard currentItem?.status == .readyToPlay else { return } let playTime = currentTime() -// debugPrint("playtime:\(playTime.seconds)") guard playTime.seconds > 0 else { return } - if let videoOutput = videoOutput, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true { - if !disableGPURender { - sharedImageProcessingContext.runOperationAsynchronously { [weak self] in - self?._process(videoOutput: videoOutput, at: playTime) - } - } else { - _process(videoOutput: videoOutput, at: playTime) + + _notifyTimeObserver(with: playTime) + + guard !isProcessing else { return } + guard let videoOutput = videoOutput, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true else { return } + isProcessing = true + sharedImageProcessingContext.runOperationAsynchronously { [weak self] in + defer { + self?.isProcessing = false } - } - if playTime.seconds > 0 { - _notifyTimeObserver(with: playTime) + self?._process(videoOutput: videoOutput, at: playTime) } } From a4aff55b7bf8a970bd7c57e986b600d30a073749 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 10 Dec 2019 12:49:08 +0800 Subject: 
[PATCH 187/332] debug: add log for printing filter pipeline --- framework/Source/Pipeline.swift | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 9dd48bd0..1dd80740 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -255,3 +255,26 @@ public class ImageRelay: ImageProcessingOperation { } } } + +#if DEBUG +public extension ImageSource { + var debugPipelineNext: String { + let nextInfos: [String] = targets.map { + if let operationGroup = $0.0 as? OperationGroup { + return operationGroup.inputImageRelay.debugPipelineNext + } else if let operation = $0.0 as? ImageProcessingOperation { + return operation.debugPipelineNext + } else { + return $0.0.debugPipelineEnd + } + } + return "{'\(self)':[\(nextInfos.joined(separator: ","))]}" + } +} + +public extension ImageConsumer { + var debugPipelineEnd: String { + return "'\(self)'" + } +} +#endif From f9335e8e0eef7ed40aa0304962f6dee602e2aebf Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 10 Dec 2019 13:33:56 +0800 Subject: [PATCH 188/332] fix(player): fix observation crash on iOS 10 --- framework/Source/iOS/MoviePlayer.swift | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index b98bfe50..2eb88cf7 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -97,9 +97,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { deinit { print("movie player deinit \(String(describing: asset))") + assert(observations.isEmpty, "observers must be removed before deinit") pause() displayLink?.invalidate() - _removePlayerObservers() } // MARK: Data Source @@ -271,6 +271,12 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } } + /// Cleanup all player resource and observers. 
This must be called before deinit, or it might crash on iOS 10 due to observation assertion. + public func cleanup() { + stop() + _removePlayerObservers() + } + func actuallySeekToTime() { // Avoid seeking choppy when fast seeking // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 From b8ca57711f7fccdd0cbff24f31473f2b426dba05 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 10 Dec 2019 19:47:06 +0800 Subject: [PATCH 189/332] fix(player): fix looping is not correctly stopped after replacing --- framework/Source/iOS/MoviePlayer.swift | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 2eb88cf7..f05ae388 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -138,6 +138,10 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { stop() } lastPlayerItem = item + // Stop looping before replacing + if loop && MoviePlayer.looperDict[self] != nil { + removeAllItems() + } if let item = item { if enableVideoOutput { _setupPlayerItemVideoOutput(for: item) @@ -169,7 +173,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } override public func removeAllItems() { - _stopLoopingIfNeeded(playerItem: currentItem) + _stopLoopingIfNeeded() super.removeAllItems() print("remove all items") } @@ -346,11 +350,11 @@ private extension MoviePlayer { } } - func _stopLoopingIfNeeded(playerItem: AVPlayerItem?) 
{ - if loop, playerItem == currentItem, let looper = MoviePlayer.looperDict[self] { + func _stopLoopingIfNeeded() { + if loop, let looper = MoviePlayer.looperDict[self] { looper.disableLooping() MoviePlayer.looperDict[self] = nil - print("stop looping item:\(String(describing: playerItem))") + print("stop looping item)") } } From e8e8671196280fe1a6bfb149deb8b629813a3bcd Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 11 Dec 2019 16:10:58 +0800 Subject: [PATCH 190/332] fix(player): change timeObserver callback timing to after rendering --- framework/Source/iOS/MoviePlayer.swift | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index f05ae388..2870e4c9 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -486,9 +486,10 @@ private extension MoviePlayer { let playTime = currentTime() guard playTime.seconds > 0 else { return } - _notifyTimeObserver(with: playTime) - - guard !isProcessing else { return } + guard !isProcessing else { + _notifyTimeObserver(with: playTime) + return + } guard let videoOutput = videoOutput, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true else { return } isProcessing = true sharedImageProcessingContext.runOperationAsynchronously { [weak self] in @@ -496,6 +497,7 @@ private extension MoviePlayer { self?.isProcessing = false } self?._process(videoOutput: videoOutput, at: playTime) + self?._notifyTimeObserver(with: playTime) } } From a2a0883400b9e224b6d0b5f3b2f8d6feebcb3e43 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 11 Dec 2019 22:28:01 +0800 Subject: [PATCH 191/332] fix(player): change timeObserver callback timing to after rendering, better fix --- framework/Source/iOS/MoviePlayer.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 2870e4c9..0d662817 100644 --- 
a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -486,11 +486,11 @@ private extension MoviePlayer { let playTime = currentTime() guard playTime.seconds > 0 else { return } - guard !isProcessing else { + guard let videoOutput = videoOutput else { _notifyTimeObserver(with: playTime) return } - guard let videoOutput = videoOutput, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true else { return } + guard !isProcessing, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true else { return } isProcessing = true sharedImageProcessingContext.runOperationAsynchronously { [weak self] in defer { From a3d1fe18a60f861ed5f8ef1911f1c6b3d55950a3 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 11 Dec 2019 22:40:39 +0800 Subject: [PATCH 192/332] fix(player): fix replaceCurrentItem/insertItem is not working when playerItem did play to end --- framework/Source/iOS/MoviePlayer.swift | 34 +++++++++++++++++++++----- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 0d662817..28997f46 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -87,6 +87,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var isSeeking = false public var enableVideoOutput = false private var isProcessing = false + private var needAddItemAfterDidEndNotify = false public override init() { print("movie player init") @@ -125,7 +126,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { lastPlayerItem = item self.enableVideoOutput = enableVideoOutput _setupPlayerObservers(playerItem: item) - super.insert(item, after: afterItem) + if shouldDelayAddPlayerItem { + needAddItemAfterDidEndNotify = true + } else { + super.insert(item, after: afterItem) + } print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) 
itemsCount:\(items().count)") } @@ -134,9 +139,6 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func replaceCurrentItem(with item: AVPlayerItem?, enableVideoOutput: Bool) { - if isPlaying { - stop() - } lastPlayerItem = item // Stop looping before replacing if loop && MoviePlayer.looperDict[self] != nil { @@ -152,7 +154,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _removePlayerObservers() } self.enableVideoOutput = enableVideoOutput - super.replaceCurrentItem(with: item) + if shouldDelayAddPlayerItem { + needAddItemAfterDidEndNotify = true + } else { + super.replaceCurrentItem(with: item) + } print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) itemsCount:\(items().count)") } @@ -210,6 +216,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return } isPlaying = true + isProcessing = false print("movie player start duration:\(String(describing: asset?.duration.seconds)) \(String(describing: asset))") _setupDisplayLinkIfNeeded() _resetTimeObservers() @@ -505,9 +512,24 @@ private extension MoviePlayer { return currentItem?.outputs.first(where: { $0 is AVPlayerItemVideoOutput }) as? AVPlayerItemVideoOutput } + /// Wait for didPlayToEnd notification and add a new playerItem. + var shouldDelayAddPlayerItem: Bool { + // NOTE: AVQueuePlayer will remove new added item immediately after inserting if last item has already played to end. + // The workaround solution is to add new item after playerDidPlayToEnd notification. + return didPlayToEnd && items().count == 1 && !loop + } + @objc func playerDidPlayToEnd(notification: Notification) { - print("player did play to end. notification:\(notification)") + print("player did play to end. notification:\(notification) items:\(items())") guard (notification.object as? 
AVPlayerItem) == currentItem else { return } + if needAddItemAfterDidEndNotify && isPlaying { + DispatchQueue.main.async() { [weak self] in + guard let self = self else { return } + self.needAddItemAfterDidEndNotify = false + self.lastPlayerItem.map { self.insert($0, after: nil) } + self.play() + } + } } @objc func playerStalled(notification: Notification) { From ac991d169b005e002b1cfd81cfb91e20c1fb22b0 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 13 Dec 2019 23:16:27 +0800 Subject: [PATCH 193/332] fix(player): improve playToEnd insert items logic --- framework/Source/iOS/MoviePlayer.swift | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 28997f46..16bde396 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -88,6 +88,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var enableVideoOutput = false private var isProcessing = false private var needAddItemAfterDidEndNotify = false + private lazy var pendingNewItems = [AVPlayerItem]() public override init() { print("movie player init") @@ -128,6 +129,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _setupPlayerObservers(playerItem: item) if shouldDelayAddPlayerItem { needAddItemAfterDidEndNotify = true + pendingNewItems.append(item) } else { super.insert(item, after: afterItem) } @@ -154,8 +156,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _removePlayerObservers() } self.enableVideoOutput = enableVideoOutput - if shouldDelayAddPlayerItem { + if shouldDelayAddPlayerItem && item != nil { needAddItemAfterDidEndNotify = true + pendingNewItems.append(item!) } else { super.replaceCurrentItem(with: item) } @@ -284,6 +287,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { /// Cleanup all player resource and observers. This must be called before deinit, or it might crash on iOS 10 due to observation assertion. 
public func cleanup() { + pendingNewItems.removeAll() stop() _removePlayerObservers() } @@ -522,12 +526,14 @@ private extension MoviePlayer { @objc func playerDidPlayToEnd(notification: Notification) { print("player did play to end. notification:\(notification) items:\(items())") guard (notification.object as? AVPlayerItem) == currentItem else { return } - if needAddItemAfterDidEndNotify && isPlaying { + if needAddItemAfterDidEndNotify { DispatchQueue.main.async() { [weak self] in guard let self = self else { return } self.needAddItemAfterDidEndNotify = false - self.lastPlayerItem.map { self.insert($0, after: nil) } - self.play() + self.pendingNewItems.forEach { self.insert($0, after: nil) } + if self.isPlaying { + self.play() + } } } } From 5c46f49110ee55315779488a77a25ef5abbade83 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 14 Dec 2019 16:48:48 +0800 Subject: [PATCH 194/332] fix(player): improve playToEnd insert items logic, continue --- framework/Source/iOS/MoviePlayer.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 16bde396..89786b8b 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -531,6 +531,7 @@ private extension MoviePlayer { guard let self = self else { return } self.needAddItemAfterDidEndNotify = false self.pendingNewItems.forEach { self.insert($0, after: nil) } + self.pendingNewItems.removeAll() if self.isPlaying { self.play() } From b9b20a504b506d213e6d18e7cd3d63cd6ab67be7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 19 Dec 2019 02:51:55 +0800 Subject: [PATCH 195/332] fix(player): fix OOM crash due to player item duration too short, change to another looping logic --- framework/Source/iOS/MoviePlayer.swift | 38 ++++++++++++++++++++------ 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 89786b8b..43ffa10f 
100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -34,6 +34,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var actualStartTime: TimeInterval { startTime ?? 0 } public var endTime: TimeInterval? public var actualEndTime: TimeInterval { endTime ?? (assetDuration - actualStartTime) } + public var actualDuration: TimeInterval { actualEndTime - actualStartTime } /// Whether to loop play. public var loop = false public var asset: AVAsset? { return playableItem?.asset } @@ -89,6 +90,10 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { private var isProcessing = false private var needAddItemAfterDidEndNotify = false private lazy var pendingNewItems = [AVPlayerItem]() + private var shouldUseLooper: Bool { + // NOTE: if video duration too short, it will cause OOM. So it is better to use "actionItemAtEnd=.none + playToEnd + seek" solution. + return false + } public override init() { print("movie player init") @@ -131,6 +136,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { needAddItemAfterDidEndNotify = true pendingNewItems.append(item) } else { + remove(item) super.insert(item, after: afterItem) } print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) itemsCount:\(items().count)") @@ -223,7 +229,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { print("movie player start duration:\(String(describing: asset?.duration.seconds)) \(String(describing: asset))") _setupDisplayLinkIfNeeded() _resetTimeObservers() - if loop { + if shouldUseLooper { if let playerItem = lastPlayerItem { MoviePlayer.looperDict[self]?.disableLooping() let start = CMTime(seconds: actualStartTime, preferredTimescale: 600) @@ -233,6 +239,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } rate = playrate } else { + if loop { + actionAtItemEnd = .none + } if currentTime().seconds != actualStartTime { 
seekToTime(actualStartTime, shouldPlayAfterSeeking: true) } else { @@ -370,6 +379,7 @@ private extension MoviePlayer { } func _setupPlayerItemVideoOutput(for item: AVPlayerItem) { + guard !item.outputs.contains(where: { $0 is AVPlayerItemVideoOutput }) else { return } let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) videoOutput.suppressesPlayerRendering = true @@ -421,14 +431,9 @@ private extension MoviePlayer { } self.timeObserversQueue.append(observer) } - if !self.loop, let endTime = self.endTime { + if !self.shouldUseLooper, let endTime = self.endTime { let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in - if self?.loop == true && self?.isPlaying == true { - self?.pause() - self?.start() - } else { - self?.pause() - } + self?.onCurrentItemPlayToEnd() } let insertIndex: Int = self.timeObserversQueue.reversed().firstIndex { endTime < $0.targetTime } ?? 0 self.timeObserversQueue.insert(endTimeObserver, at: insertIndex) @@ -436,6 +441,17 @@ private extension MoviePlayer { } } + func onCurrentItemPlayToEnd() { + if loop == true && isPlaying == true { + // calling start() directly will cause recursive method call + DispatchQueue.main.async { [weak self] in + self?.start() + } + } else { + pause() + } + } + func playerRateDidChange() { // debugPrint("rate change to:\(player.rate) asset:\(asset) status:\(player.status.rawValue)") resumeIfNeeded() @@ -520,7 +536,7 @@ private extension MoviePlayer { var shouldDelayAddPlayerItem: Bool { // NOTE: AVQueuePlayer will remove new added item immediately after inserting if last item has already played to end. // The workaround solution is to add new item after playerDidPlayToEnd notification. 
- return didPlayToEnd && items().count == 1 && !loop + return didPlayToEnd && items().count == 1 && !shouldUseLooper } @objc func playerDidPlayToEnd(notification: Notification) { @@ -536,6 +552,10 @@ private extension MoviePlayer { self.play() } } + } else { + DispatchQueue.main.async() { [weak self] in + self?.onCurrentItemPlayToEnd() + } } } From 532168310bb379c99f58c7e205dcb19b90f9aca5 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 19 Dec 2019 15:10:02 +0800 Subject: [PATCH 196/332] improve(player): improve seeking time accuracy --- framework/Source/iOS/MoviePlayer.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 43ffa10f..f132d9b8 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -90,6 +90,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { private var isProcessing = false private var needAddItemAfterDidEndNotify = false private lazy var pendingNewItems = [AVPlayerItem]() + private var pendingSeekInfo: SeekingInfo? private var shouldUseLooper: Bool { // NOTE: if video duration too short, it will cause OOM. So it is better to use "actionItemAtEnd=.none + playToEnd + seek" solution. 
return false @@ -149,7 +150,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func replaceCurrentItem(with item: AVPlayerItem?, enableVideoOutput: Bool) { lastPlayerItem = item // Stop looping before replacing - if loop && MoviePlayer.looperDict[self] != nil { + if shouldUseLooper && MoviePlayer.looperDict[self] != nil { removeAllItems() } if let item = item { @@ -278,7 +279,10 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func seekToTime(_ time: TimeInterval, shouldPlayAfterSeeking: Bool) { - let targetTime = CMTime(seconds: time, preferredTimescale: 600) + seekToTime(CMTime(seconds: time, preferredTimescale: 48000), shouldPlayAfterSeeking: shouldPlayAfterSeeking) + } + + public func seekToTime(_ targetTime: CMTime, shouldPlayAfterSeeking: Bool) { if shouldPlayAfterSeeking { // 0.1s has 3 frames tolerance for 30 FPS video, it should be enough if there is no sticky video let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) From 08125349272dea0d3932d5cc24a5631f2e76e638 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 19 Dec 2019 15:35:32 +0800 Subject: [PATCH 197/332] fix(camera): fix possible frame dropping caused by retain too much sample buffer --- framework/Source/iOS/MovieOutput.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index fc67813c..05750d8e 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -464,7 +464,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if(self.encodingLiveVideo) { - movieProcessingContext.runOperationAsynchronously(work) + movieProcessingContext.runOperationSynchronously(work) } else { work() @@ -540,7 +540,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if(self.encodingLiveVideo) { - movieProcessingContext.runOperationAsynchronously(work) + 
movieProcessingContext.runOperationSynchronously(work) } else { work() From a87b8a0e47a9df8a2490660f780f26ab9144d65f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 23 Dec 2019 13:15:11 +0800 Subject: [PATCH 198/332] improve(player): change to use CMTime for accuracy, and improve looping logic --- framework/Source/iOS/MoviePlayer.swift | 102 +++++++++++++++---------- 1 file changed, 61 insertions(+), 41 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index f132d9b8..3509250d 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -8,16 +8,16 @@ import AVFoundation public protocol MoviePlayerDelegate: class { - func moviePlayerDidReadPixelBuffer(_ pixelBuffer: CVPixelBuffer, time: TimeInterval) + func moviePlayerDidReadPixelBuffer(_ pixelBuffer: CVPixelBuffer, time: CMTime) } -public typealias MoviePlayerTimeObserverCallback = (TimeInterval) -> Void +public typealias MoviePlayerTimeObserverCallback = (CMTime) -> Void public struct MoviePlayerTimeObserver { - let targetTime: TimeInterval + let targetTime: CMTime let callback: MoviePlayerTimeObserverCallback let observerID: String - init(targetTime: TimeInterval, callback: @escaping MoviePlayerTimeObserverCallback) { + init(targetTime: CMTime, callback: @escaping MoviePlayerTimeObserverCallback) { self.targetTime = targetTime self.callback = callback observerID = UUID.init().uuidString @@ -30,13 +30,14 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var runBenchmark = false public var logEnabled = false public weak var delegate: MoviePlayerDelegate? - public var startTime: TimeInterval? - public var actualStartTime: TimeInterval { startTime ?? 0 } - public var endTime: TimeInterval? - public var actualEndTime: TimeInterval { endTime ?? (assetDuration - actualStartTime) } - public var actualDuration: TimeInterval { actualEndTime - actualStartTime } + public var startTime: CMTime? 
+ public var actualStartTime: CMTime { startTime ?? .zero } + public var endTime: CMTime? + public var actualEndTime: CMTime { endTime ?? CMTimeSubtract(assetDuration, actualStartTime) } + public var actualDuration: CMTime { actualEndTime - actualStartTime } /// Whether to loop play. public var loop = false + private var previousPlayerActionAtItemEnd: AVPlayer.ActionAtItemEnd? public var asset: AVAsset? { return playableItem?.asset } public private(set) var isPlaying = false public var lastPlayerItem: AVPlayerItem? @@ -53,8 +54,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { var totalFramesSent = 0 var totalFrameTime: Double = 0.0 public var playrate: Float = 1.0 - public var assetDuration: TimeInterval { - return asset?.duration.seconds ?? 0 + public var assetDuration: CMTime { + return asset?.duration ?? .zero } public var isReadyToPlay: Bool { return status == .readyToPlay @@ -64,7 +65,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return asset.imageOrientation ?? .portrait } public var didPlayToEnd: Bool { - return currentTime().seconds >= assetDuration + return currentTime() >= assetDuration } public var hasTarget: Bool { targets.count > 0 } @@ -95,6 +96,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { // NOTE: if video duration too short, it will cause OOM. So it is better to use "actionItemAtEnd=.none + playToEnd + seek" solution. 
return false } + private var didTriggerEndTimeObserver = false public override init() { print("movie player init") @@ -175,7 +177,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func replayLastItem() { guard let playerItem = lastPlayerItem else { return } replaceCurrentItem(with: playerItem) - if playerItem.currentTime().seconds != actualStartTime { + if playerItem.currentTime() != actualStartTime { seekToTime(actualStartTime, shouldPlayAfterSeeking: true) } else { play() @@ -233,9 +235,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if shouldUseLooper { if let playerItem = lastPlayerItem { MoviePlayer.looperDict[self]?.disableLooping() - let start = CMTime(seconds: actualStartTime, preferredTimescale: 600) - let end = CMTime(seconds: endTime ?? playerItem.asset.duration.seconds, preferredTimescale: 600) - let looper = AVPlayerLooper(player: self, templateItem: playerItem, timeRange: CMTimeRange(start: start, end: end)) + let looper = AVPlayerLooper(player: self, templateItem: playerItem, timeRange: CMTimeRange(start: actualStartTime, end: actualEndTime)) MoviePlayer.looperDict[self] = looper } rate = playrate @@ -243,7 +243,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if loop { actionAtItemEnd = .none } - if currentTime().seconds != actualStartTime { + if currentTime() != actualStartTime { seekToTime(actualStartTime, shouldPlayAfterSeeking: true) } else { rate = playrate @@ -291,7 +291,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } - if assetDuration <= 0 { + if assetDuration <= .zero { print("cannot seek since assetDuration is 0. 
currentItem:\(String(describing: currentItem))") } else { actuallySeekToTime() @@ -332,8 +332,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { // Not needed for movie inputs } - public func addTimeObserver(seconds: TimeInterval, callback: @escaping MoviePlayerTimeObserverCallback) -> MoviePlayerTimeObserver { - let timeObserver = MoviePlayerTimeObserver(targetTime: seconds, callback: callback) + public func addTimeObserver(at time: CMTime, callback: @escaping MoviePlayerTimeObserverCallback) -> MoviePlayerTimeObserver { + let timeObserver = MoviePlayerTimeObserver(targetTime: time, callback: callback) _timeObserversUpdate { [weak self] in guard let self = self else { return } self.totalTimeObservers.append(timeObserver) @@ -341,7 +341,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return lhs.targetTime > rhs.targetTime } if self.isPlaying { - if let lastIndex = self.timeObserversQueue.firstIndex(where: { $0.targetTime >= seconds }) { + if let lastIndex = self.timeObserversQueue.firstIndex(where: { $0.targetTime >= time }) { self.timeObserversQueue.insert(timeObserver, at: lastIndex) } else { self.timeObserversQueue.append(timeObserver) @@ -364,6 +364,24 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { self?.totalTimeObservers.removeAll() } } + + public func setLoopEnabled(_ enabled: Bool, timeRange: CMTimeRange) { + if enabled { + if previousPlayerActionAtItemEnd == nil { + previousPlayerActionAtItemEnd = actionAtItemEnd + } + actionAtItemEnd = .none + startTime = timeRange.start + endTime = timeRange.end + assert(timeRange.start >= .zero || timeRange.end > .zero && CMTimeSubtract(timeRange.end, assetDuration) < .zero, "timerange is invalid. timerange:\(timeRange) assetDuration:\(assetDuration)") + } else { + actionAtItemEnd = previousPlayerActionAtItemEnd ?? 
.advance + startTime = nil + endTime = nil + } + _resetTimeObservers() + loop = enabled + } } private extension MoviePlayer { @@ -426,6 +444,7 @@ private extension MoviePlayer { } func _resetTimeObservers() { + didTriggerEndTimeObserver = false _timeObserversUpdate { [weak self] in guard let self = self else { return } self.timeObserversQueue.removeAll() @@ -435,24 +454,12 @@ private extension MoviePlayer { } self.timeObserversQueue.append(observer) } - if !self.shouldUseLooper, let endTime = self.endTime { - let endTimeObserver = MoviePlayerTimeObserver(targetTime: endTime) { [weak self] _ in - self?.onCurrentItemPlayToEnd() - } - let insertIndex: Int = self.timeObserversQueue.reversed().firstIndex { endTime < $0.targetTime } ?? 0 - self.timeObserversQueue.insert(endTimeObserver, at: insertIndex) - } } } func onCurrentItemPlayToEnd() { - if loop == true && isPlaying == true { - // calling start() directly will cause recursive method call - DispatchQueue.main.async { [weak self] in - self?.start() - } - } else { - pause() + if loop && isPlaying { + start() } } @@ -486,12 +493,19 @@ private extension MoviePlayer { // MARK: Internal processing functions func _process(videoOutput: AVPlayerItemVideoOutput, at playTime: CMTime) { - guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: nil) else { + var timeForDisplay: CMTime = .zero + guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: &timeForDisplay) else { print("Failed to copy pixel buffer at time:\(playTime)") return } - delegate?.moviePlayerDidReadPixelBuffer(pixelBuffer, time: CMTimeGetSeconds(playTime)) + // Out of range when looping, skip process. So that it won't show unexpected frames. 
+ if loop && (timeForDisplay < actualStartTime || timeForDisplay >= actualEndTime) { + print("Skipped frame at time:\(timeForDisplay.seconds) is larger than range: [\(actualStartTime.seconds), \(actualEndTime.seconds)]") + return + } + + delegate?.moviePlayerDidReadPixelBuffer(pixelBuffer, time: timeForDisplay) let startTime = CFAbsoluteTimeGetCurrent() if runBenchmark || logEnabled { @@ -506,7 +520,7 @@ private extension MoviePlayer { } } - guard hasTarget, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: playTime, videoOrientation: videoOrientation) else { return } + guard hasTarget, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: timeForDisplay, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo updateTargetsWithFramebuffer(framebuffer) @@ -569,11 +583,17 @@ private extension MoviePlayer { } func _notifyTimeObserver(with sampleTime: CMTime) { - let currentTime = sampleTime.seconds + // Directly callback time play to end observer since it needs to be callbacked more timely, ex. 
seeking to start + if sampleTime > actualEndTime && !shouldUseLooper && endTime != nil && !didTriggerEndTimeObserver { + didTriggerEndTimeObserver = true + onCurrentItemPlayToEnd() + } + + // Other observers might have delay since it needs to wait for main thread _timeObserversUpdate { [weak self] in - while let lastObserver = self?.timeObserversQueue.last, lastObserver.targetTime <= currentTime { + while let lastObserver = self?.timeObserversQueue.last, lastObserver.targetTime <= sampleTime { self?.timeObserversQueue.removeLast() - lastObserver.callback(currentTime) + lastObserver.callback(sampleTime) } } } From 39f5e5b9de59468d1e2c9c32cd3cc0e254bc36e7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 31 Dec 2019 13:17:34 +0800 Subject: [PATCH 199/332] improve(player): fix seeking cannot render frame --- framework/Source/iOS/MoviePlayer.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 3509250d..8d211338 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -500,7 +500,7 @@ private extension MoviePlayer { } // Out of range when looping, skip process. So that it won't show unexpected frames. 
- if loop && (timeForDisplay < actualStartTime || timeForDisplay >= actualEndTime) { + if loop && isPlaying && (timeForDisplay < actualStartTime || timeForDisplay >= actualEndTime) { print("Skipped frame at time:\(timeForDisplay.seconds) is larger than range: [\(actualStartTime.seconds), \(actualEndTime.seconds)]") return } From f80ffd111ec0ff36604352eadc9dfdbc1f49f6be Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 31 Dec 2019 19:12:40 +0800 Subject: [PATCH 200/332] improve(player): fix seeking not working if playerItem status haven't changed to readyToPlay --- framework/Source/iOS/MoviePlayer.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 8d211338..f2f784f3 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -481,10 +481,10 @@ private extension MoviePlayer { } func resumeIfNeeded() { - guard isReadyToPlay && isPlaying == true && rate != playrate else { return } + guard isReadyToPlay && isPlaying == true else { return } if nextSeeking != nil { actuallySeekToTime() - } else { + } else if rate != playrate { rate = playrate } } From 004073f5d5e09a5f81565ebbb47ba34f697b4a7d Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 6 Jan 2020 16:55:41 +0800 Subject: [PATCH 201/332] fix(transcoding): fix audio buffer infinitely waiting --- framework/Source/iOS/MovieInput.swift | 6 ++++-- framework/Source/iOS/MovieOutput.swift | 9 +++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 623ea61d..6cd777cc 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -445,9 +445,11 @@ public class MovieInput: ImageSource { } func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { + let shouldInvalidate = !transcodingOnly guard let 
sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { movieOutput.movieProcessingContext.runOperationAsynchronously { + movieOutput.flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: shouldInvalidate) movieOutput.audioEncodingIsFinished = true movieOutput.assetWriterAudioInput?.markAsFinished() } @@ -460,10 +462,10 @@ public class MovieInput: ImageSource { if let movieOutput = self.synchronizedMovieOutput { movieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in guard let self = self else { return } - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !self.transcodingOnly) + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidate) } } else { - audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: !transcodingOnly) + audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidate) } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 05750d8e..c82e7071 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -512,6 +512,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { while(!assetWriterAudioInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.audioEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Audio waiting...") usleep(100000) + if !assetWriterAudioInput.isReadyForMoreMediaData { + self.synchronizedEncodingDebugPrint("Audio still not ready, skip this runloop...") + return + } } guard self.previousFrameTime != nil else { @@ -547,6 +551,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + public func flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: Bool) { + guard let lastBuffer = pendingAudioBuffers.popLast() else { return } + 
processAudioBuffer(lastBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + } + // Note: This is not used for synchronized encoding, only live video. public func readyForNextAudioBuffer() -> Bool { return true From be055fc18aae8f6d8aafe302f458ac81d55a0803 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 9 Jan 2020 02:17:46 +0800 Subject: [PATCH 202/332] chore: update MoviePlayer log --- framework/Source/iOS/MoviePlayer.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index f2f784f3..25ebbe36 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -229,7 +229,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } isPlaying = true isProcessing = false - print("movie player start duration:\(String(describing: asset?.duration.seconds)) \(String(describing: asset))") + print("movie player start duration:\(String(describing: asset?.duration.seconds)) items:\(String(describing: items()))") _setupDisplayLinkIfNeeded() _resetTimeObservers() if shouldUseLooper { From e4f1a0c8303a8eb0a8912a191b3546349c77da0a Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 12 Jan 2020 22:57:47 +0800 Subject: [PATCH 203/332] improve(MovieOutput): support video buffer cache --- framework/Source/iOS/MovieOutput.swift | 165 +++++++++++++++++++------ 1 file changed, 128 insertions(+), 37 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index c82e7071..2fe758ea 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -76,6 +76,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var audioSourceFormatHint:CMFormatDescription? 
let movieProcessingContext:OpenGLContext + var outputBufferCache = [(CVPixelBuffer, CMTime)]() + public private(set) var cacheBuffersDuration: TimeInterval = 0 + var isCaching = false var synchronizedEncodingDebug = false var totalFramesAppended:Int = 0 @@ -151,6 +154,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.keepLastPixelBuffer = keepLastPixelBuffer } + public func startCachingWithoutWriting(duration: TimeInterval) { + print("MovieOutput starting caching. duration:\(duration)") + isCaching = true + cacheBuffersDuration = duration + } + + public func finishCachingAndStartWriting() { + print("MovieOutput finish caching and start writing. cached buffer count:\(outputBufferCache.count)") + isCaching = false + } + public func startRecording(sync: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) @@ -234,6 +248,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriter.endSession(atSourceTime: lastFrame) } self.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?.outputBufferCache.removeAll() self?.pendingAudioBuffers.removeAll() } @@ -260,6 +275,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.isRecording = false self.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?.outputBufferCache.removeAll() self?.pendingAudioBuffers.removeAll() } @@ -272,6 +288,51 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); + let cache = { [weak self] in + guard let self = self else { return } + // Discard first n frames + if self.dropFirstFrames > 0 { + self.dropFirstFrames -= 1 + 
self.synchronizedEncodingDebugPrint("Drop one frame. Left dropFirstFrames:\(self.dropFirstFrames)") + return + } + guard self.isRecording, + self.assetWriter.status == .writing, + !self.videoEncodingIsFinished else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + if self.keepLastPixelBuffer { + self.pixelBuffer = nil + } + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) + guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { + print("[Caching] WARNING: Unable to create pixel buffer, dropping frame") + return + } + do { + try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + self.outputBufferCache.append((self.pixelBuffer!, frameTime)) + print("[Caching] appended new buffer at:\(frameTime.seconds)") + if let firstBufferTime = self.outputBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > self.cacheBuffersDuration { + let firstBuffer = self.outputBufferCache.removeFirst() + print("[Caching] caching duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") + } + } catch { + print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") + } + + CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + if !self.keepLastPixelBuffer { + self.pixelBuffer = nil + } + + sharedImageProcessingContext.runOperationAsynchronously { + framebuffer.unlock() + } + } + let work = { [weak self] in guard let self = self else { return } // Discard first n frames @@ -296,15 +357,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
- self.assetWriter.startSession(atSourceTime: frameTime) - self.startFrameTime = frameTime - self.delegate?.movieOutputDidStartWriting(self, at: frameTime) + let startFrameTime = self.outputBufferCache.first?.1 ?? frameTime + self.assetWriter.startSession(atSourceTime: startFrameTime) + self.startFrameTime = startFrameTime + self.delegate?.movieOutputDidStartWriting(self, at: startFrameTime) } self.previousFrameTime = frameTime guard (self.assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { - print("Had to drop a frame at time \(frameTime)") + print("WARNING: Had to drop a frame at time \(frameTime)") return } @@ -323,39 +385,68 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if self.keepLastPixelBuffer { self.pixelBuffer = nil } - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) - guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { - print("WARNING: Unable to create pixel buffer, dropping frame") - return - } - do { - try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + func __renderBuffer() { + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) + guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { + print("WARNING: Unable to create pixel buffer, dropping frame") + return + } + do { + try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + self.outputBufferCache.append((self.pixelBuffer!, frameTime)) + self.synchronizedEncodingDebugPrint("Process frame output. Time:\(CMTimeGetSeconds(frameTime))") + } + catch { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") + } - self.synchronizedEncodingDebugPrint("Process frame output. 
Time:\(CMTimeGetSeconds(frameTime))") + CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + if !self.keepLastPixelBuffer { + self.pixelBuffer = nil + } - try NSObject.catchException { - if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") - } + sharedImageProcessingContext.runOperationAsynchronously { + framebuffer.unlock() } } - catch { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") - } - if(self.synchronizedEncodingDebug) { - self.totalFramesAppended += 1 + func __appendBuffers() { + var appendedBufferCount = 0 + do { + try NSObject.catchException { + // Drain all cached buffers at first + if !self.outputBufferCache.isEmpty { + for (i, (buffer, time)) in self.outputBufferCache.enumerated() { + print("appending video buffer \(i+1)/\(self.outputBufferCache.count) at:\(time.seconds)") + if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + break + } + appendedBufferCount += 1 + if(self.synchronizedEncodingDebug) { + self.totalFramesAppended += 1 + } + } + } + } + } + catch { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") + } + self.outputBufferCache.removeFirst(appendedBufferCount) } - CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !self.keepLastPixelBuffer { - self.pixelBuffer = nil + if !self.outputBufferCache.isEmpty { + // If outputBufferCache has too much buffers, processing current buffers to ease the burden of videoInput, or it will crash + __appendBuffers() + __renderBuffer() + __appendBuffers() + } else { + __renderBuffer() + __appendBuffers() } - 
sharedImageProcessingContext.runOperationAsynchronously { - framebuffer.unlock() - } if self.videoEncodingIsFinished { self.assetWriterVideoInput.markAsFinished() } @@ -365,13 +456,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. - movieProcessingContext.runOperationAsynchronously(work) + movieProcessingContext.runOperationAsynchronously(isCaching ? cache : work) } else { // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. - movieProcessingContext.runOperationSynchronously(work) + movieProcessingContext.runOperationSynchronously(isCaching ? 
cache : work) } } @@ -498,17 +589,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + self.pendingAudioBuffers.append(sampleBuffer) guard (assetWriterAudioInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { - print("Had to drop a audio sample at time \(currentSampleTime)") - if shouldInvalidateSampleWhenDone { - CMSampleBufferInvalidate(sampleBuffer) - } + print("Had to delay a audio sample at time \(currentSampleTime)") return } - self.pendingAudioBuffers.append(sampleBuffer) - while(!assetWriterAudioInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.audioEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Audio waiting...") usleep(100000) @@ -525,13 +612,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.synchronizedEncodingDebugPrint("Process audio sample output. Time:\(CMTimeGetSeconds(currentSampleTime))") + var appendedBufferCount = 0 do { try NSObject.catchException { - while let audioBuffer = self.pendingAudioBuffers.first { + for (i, audioBuffer) in self.pendingAudioBuffers.enumerated() { + print("appending audio buffer \(i+1)/\(self.pendingAudioBuffers.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") if (!assetWriterAudioInput.append(audioBuffer)) { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + break } - self.pendingAudioBuffers.removeFirst() + appendedBufferCount += 1 if shouldInvalidateSampleWhenDone { CMSampleBufferInvalidate(audioBuffer) } @@ -541,6 +631,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { catch { print("WARNING: Trouble appending audio sample buffer: \(error)") } + self.pendingAudioBuffers.removeFirst(appendedBufferCount) } if(self.encodingLiveVideo) { From 88287eef6dbdf4681667ca97a80159da34058e9d Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 12 Jan 2020 
23:51:49 +0800 Subject: [PATCH 204/332] improve(MovieOutput): support video sample buffer cache --- framework/Source/iOS/MovieOutput.swift | 170 ++++++++++++++++--------- 1 file changed, 113 insertions(+), 57 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 2fe758ea..0b8ec689 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -65,7 +65,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } } - var pendingAudioBuffers = [CMSampleBuffer]() public private(set) var pixelBuffer:CVPixelBuffer? = nil public var dropFirstFrames: Int = 0 public var waitUtilDataIsReadyForLiveVideo = false @@ -76,7 +75,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var audioSourceFormatHint:CMFormatDescription? let movieProcessingContext:OpenGLContext - var outputBufferCache = [(CVPixelBuffer, CMTime)]() + var videoPixelBufferCache = [(CVPixelBuffer, CMTime)]() + var videoSampleBufferCache = NSMutableArray() + var audioSampleBufferCache = [CMSampleBuffer]() public private(set) var cacheBuffersDuration: TimeInterval = 0 var isCaching = false @@ -154,17 +155,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.keepLastPixelBuffer = keepLastPixelBuffer } - public func startCachingWithoutWriting(duration: TimeInterval) { - print("MovieOutput starting caching. duration:\(duration)") - isCaching = true - cacheBuffersDuration = duration - } - - public func finishCachingAndStartWriting() { - print("MovieOutput finish caching and start writing. cached buffer count:\(outputBufferCache.count)") - isCaching = false - } - public func startRecording(sync: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. 
// If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) @@ -227,8 +217,20 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + public func startCachingWithoutWriting(duration: TimeInterval) { + print("MovieOutput starting caching. duration:\(duration)") + isCaching = true + cacheBuffersDuration = duration + } + + public func finishCachingAndStartWriting() { + print("MovieOutput finish caching and start writing. cached buffer: videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + isCaching = false + } + public func finishRecording(_ completionCallback:(() -> Void)? = nil) { MovieOutput.assetWriterQueue.async { + self._cleanBufferCaches() guard self.isRecording, self.assetWriter.status == .writing else { completionCallback?() @@ -247,10 +249,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // the session's samples (that is, no samples will be edited out at the end)." self.assetWriter.endSession(atSourceTime: lastFrame) } - self.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?.outputBufferCache.removeAll() - self?.pendingAudioBuffers.removeAll() - } if let lastFrame = self.previousFrameTime, let startFrame = self.startFrameTime { self.recordedDuration = lastFrame - startFrame @@ -263,8 +261,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func cancelRecording(_ completionCallback:(() -> Void)? 
= nil) { - MovieOutput.assetWriterQueue.async { - guard self.isRecording, + MovieOutput.assetWriterQueue.async { [weak self] in + self?._cleanBufferCaches() + guard let self = self, self.isRecording, self.assetWriter.status == .writing else { completionCallback?() return @@ -274,10 +273,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.videoEncodingIsFinished = true self.isRecording = false - self.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?.outputBufferCache.removeAll() - self?.pendingAudioBuffers.removeAll() - } self.assetWriter.cancelWriting() completionCallback?() @@ -285,6 +280,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + private func _cleanBufferCaches() { + movieProcessingContext.runOperationAsynchronously { [weak self] in + self?.videoPixelBufferCache.removeAll() + self?.videoSampleBufferCache.removeAllObjects() + self?.audioSampleBufferCache.removeAll() + } + } + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); @@ -313,11 +316,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } do { try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - self.outputBufferCache.append((self.pixelBuffer!, frameTime)) + self.videoPixelBufferCache.append((self.pixelBuffer!, frameTime)) print("[Caching] appended new buffer at:\(frameTime.seconds)") - if let firstBufferTime = self.outputBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > self.cacheBuffersDuration { - let firstBuffer = self.outputBufferCache.removeFirst() - print("[Caching] caching duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") + while let firstBufferTime = self.videoPixelBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > self.cacheBuffersDuration { + let firstBuffer = self.videoPixelBufferCache.removeFirst() + print("[Caching] caching video duration 
reach up to:\(self.cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") } } catch { print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") @@ -357,7 +360,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. - let startFrameTime = self.outputBufferCache.first?.1 ?? frameTime + let startFrameTime = self.videoPixelBufferCache.first?.1 ?? frameTime self.assetWriter.startSession(atSourceTime: startFrameTime) self.startFrameTime = startFrameTime self.delegate?.movieOutputDidStartWriting(self, at: startFrameTime) @@ -394,7 +397,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } do { try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - self.outputBufferCache.append((self.pixelBuffer!, frameTime)) + self.videoPixelBufferCache.append((self.pixelBuffer!, frameTime)) self.synchronizedEncodingDebugPrint("Process frame output. 
Time:\(CMTimeGetSeconds(frameTime))") } catch { @@ -416,9 +419,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try NSObject.catchException { // Drain all cached buffers at first - if !self.outputBufferCache.isEmpty { - for (i, (buffer, time)) in self.outputBufferCache.enumerated() { - print("appending video buffer \(i+1)/\(self.outputBufferCache.count) at:\(time.seconds)") + if !self.videoPixelBufferCache.isEmpty { + for (i, (buffer, time)) in self.videoPixelBufferCache.enumerated() { + print("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") break @@ -434,11 +437,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { catch { print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") } - self.outputBufferCache.removeFirst(appendedBufferCount) + self.videoPixelBufferCache.removeFirst(appendedBufferCount) } - if !self.outputBufferCache.isEmpty { - // If outputBufferCache has too much buffers, processing current buffers to ease the burden of videoInput, or it will crash + if !self.videoPixelBufferCache.isEmpty { + // If videoPixelBufferCache has too much buffers, processing current buffers to ease the burden of videoInput, or it will crash __appendBuffers() __renderBuffer() __appendBuffers() @@ -495,6 +498,28 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Append buffer directly from CMSampleBuffer public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + let cache = { + guard self.isRecording, + self.assetWriter.status == .writing, + !self.videoEncodingIsFinished else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } + + let frameTime = 
CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + self.videoSampleBufferCache.add(sampleBuffer) + print("[Caching] cache new video sample buffer at:\(frameTime.seconds)") + if self.videoSampleBufferCache.count >= 13 && self.encodingLiveVideo { + // Be careful of caching too much sample buffers from camera captureOutput. iOS has a hard limit of camera buffer count: 15. + print("WARNING: almost reach system buffer limit: \(self.videoSampleBufferCache.count)/15") + } + while let firstBuffer = self.videoSampleBufferCache.firstObject, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer)).seconds > self.cacheBuffersDuration { + self.videoSampleBufferCache.removeObject(at: 0) + print("[Caching] caching video duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer).seconds)") + } + } + let work = { defer { if(shouldInvalidateSampleWhenDone) { @@ -514,11 +539,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. guard (frameTime != self.previousFrameTime) else { return } + self.videoSampleBufferCache.add(sampleBuffer) + if (self.previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. - self.assetWriter.startSession(atSourceTime: frameTime) - self.startFrameTime = frameTime - self.delegate?.movieOutputDidStartWriting(self, at: frameTime) + let startFrameTime = self.videoSampleBufferCache.firstObject.map { CMSampleBufferGetPresentationTimeStamp($0 as! CMSampleBuffer) } ?? 
frameTime + self.assetWriter.startSession(atSourceTime: startFrameTime) + self.startFrameTime = startFrameTime + self.delegate?.movieOutputDidStartWriting(self, at: startFrameTime) } self.previousFrameTime = frameTime @@ -528,11 +556,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } - guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") - return - } - while(!self.assetWriterVideoInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.videoEncodingIsFinished) { self.synchronizedEncodingDebugPrint("Video waiting...") // Better to poll isReadyForMoreMediaData often since when it does become true @@ -540,25 +563,40 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { usleep(100000) // 0.1 seconds } + var appendedBufferCount = 0 do { - self.synchronizedEncodingDebugPrint("Process frame output") - try NSObject.catchException { - if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime:frameTime)) { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + // Drain all cached buffers at first + for (i, sampleBufferObject) in self.videoSampleBufferCache.enumerated() { + let sampleBuffer = sampleBufferObject as! 
CMSampleBuffer + let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + print("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") + guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") + break + } + if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { + print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") + break + } + appendedBufferCount += 1 + if(self.synchronizedEncodingDebug) { + self.totalFramesAppended += 1 + } } } } catch { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") + print("WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") } + self.videoSampleBufferCache.removeObjects(in: NSRange(0.. self.cacheBuffersDuration { + _ = self.audioSampleBufferCache.removeFirst() + print("[Caching] caching audio duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer).seconds)") + } + } + let work = { guard self.isRecording, self.assetWriter.status == .writing, @@ -589,7 +645,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - self.pendingAudioBuffers.append(sampleBuffer) + self.audioSampleBufferCache.append(sampleBuffer) guard (assetWriterAudioInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { print("Had to delay a audio sample at time \(currentSampleTime)") @@ -615,8 +671,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var appendedBufferCount = 0 do { try NSObject.catchException { - for (i, audioBuffer) in self.pendingAudioBuffers.enumerated() { - print("appending audio buffer \(i+1)/\(self.pendingAudioBuffers.count) 
at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") + for (i, audioBuffer) in self.audioSampleBufferCache.enumerated() { + print("appending audio buffer \(i+1)/\(self.audioSampleBufferCache.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") if (!assetWriterAudioInput.append(audioBuffer)) { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") break @@ -631,19 +687,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { catch { print("WARNING: Trouble appending audio sample buffer: \(error)") } - self.pendingAudioBuffers.removeFirst(appendedBufferCount) + self.audioSampleBufferCache.removeFirst(appendedBufferCount) } if(self.encodingLiveVideo) { - movieProcessingContext.runOperationSynchronously(work) + movieProcessingContext.runOperationSynchronously(isCaching ? cache : work) } else { - work() + (isCaching ? cache : work)() } } public func flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: Bool) { - guard let lastBuffer = pendingAudioBuffers.popLast() else { return } + guard let lastBuffer = audioSampleBufferCache.popLast() else { return } processAudioBuffer(lastBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) } From 36d3c063af24d0d26ed353ec6e88e588cd97a9b6 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 13 Jan 2020 15:03:45 +0800 Subject: [PATCH 205/332] chore: update access level --- framework/Source/iOS/MovieOutput.swift | 33 +++++++++++++------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 0b8ec689..04888877 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -45,19 +45,18 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public weak var delegate: MovieOutputDelegate? 
- let assetWriter:AVAssetWriter + private let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? - - let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor - let size:Size - let colorSwizzlingShader:ShaderProgram - private var isRecording = false + private let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor + public let size: Size + private let colorSwizzlingShader:ShaderProgram + public private(set) var isRecording = false var videoEncodingIsFinished = false var audioEncodingIsFinished = false var markIsFinishedAfterProcessing = false private var startFrameTime: CMTime? - public var recordedDuration: CMTime? + public private(set) var recordedDuration: CMTime? private var previousFrameTime: CMTime? var encodingLiveVideo:Bool { didSet { @@ -68,21 +67,21 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var pixelBuffer:CVPixelBuffer? = nil public var dropFirstFrames: Int = 0 public var waitUtilDataIsReadyForLiveVideo = false - let keepLastPixelBuffer: Bool - var renderFramebuffer:Framebuffer! + public let keepLastPixelBuffer: Bool + public private(set) var renderFramebuffer:Framebuffer! - var audioSettings:[String:Any]? = nil - var audioSourceFormatHint:CMFormatDescription? + public private(set) var audioSettings:[String:Any]? = nil + public private(set) var audioSourceFormatHint:CMFormatDescription? 
- let movieProcessingContext:OpenGLContext - var videoPixelBufferCache = [(CVPixelBuffer, CMTime)]() - var videoSampleBufferCache = NSMutableArray() - var audioSampleBufferCache = [CMSampleBuffer]() + public let movieProcessingContext:OpenGLContext + public private(set) var videoPixelBufferCache = [(CVPixelBuffer, CMTime)]() + public private(set) var videoSampleBufferCache = NSMutableArray() + public private(set) var audioSampleBufferCache = [CMSampleBuffer]() public private(set) var cacheBuffersDuration: TimeInterval = 0 - var isCaching = false + public private(set) var isCaching = false var synchronizedEncodingDebug = false - var totalFramesAppended:Int = 0 + public private(set) var totalFramesAppended:Int = 0 private var observations = [NSKeyValueObservation]() deinit { From 9fcc183988fefd2dfbc0132b02d6a67a7006762b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 13 Jan 2020 16:10:15 +0800 Subject: [PATCH 206/332] refactor(MovieOutput): change to use MovieOutputState for more concrete state control --- framework/Source/iOS/MovieOutput.swift | 353 ++++++++++++------------- 1 file changed, 164 insertions(+), 189 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 04888877..7187d552 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -38,6 +38,15 @@ public enum MovieOutputError: Error, CustomStringConvertible { } } +public enum MovieOutputState: String { + case unknown + case idle + case caching + case writing + case finished + case canceled +} + public class MovieOutput: ImageConsumer, AudioEncodingTarget { private static let assetWriterQueue = DispatchQueue(label: "com.GPUImage2.MovieOutput.assetWriterQueue", qos: .userInitiated) public let sources = SourceContainer() @@ -51,7 +60,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor public let size: Size private let 
colorSwizzlingShader:ShaderProgram - public private(set) var isRecording = false var videoEncodingIsFinished = false var audioEncodingIsFinished = false var markIsFinishedAfterProcessing = false @@ -67,7 +75,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var pixelBuffer:CVPixelBuffer? = nil public var dropFirstFrames: Int = 0 public var waitUtilDataIsReadyForLiveVideo = false - public let keepLastPixelBuffer: Bool + public private(set) var state = MovieOutputState.unknown public private(set) var renderFramebuffer:Framebuffer! public private(set) var audioSettings:[String:Any]? = nil @@ -78,7 +86,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var videoSampleBufferCache = NSMutableArray() public private(set) var audioSampleBufferCache = [CMSampleBuffer]() public private(set) var cacheBuffersDuration: TimeInterval = 0 - public private(set) var isCaching = false var synchronizedEncodingDebug = false public private(set) var totalFramesAppended:Int = 0 @@ -93,7 +100,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } var preferredTransform: CGAffineTransform? - public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil, keepLastPixelBuffer: Bool = false) throws { + public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? 
= nil) throws { print("movie output init \(URL)") @@ -151,10 +158,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.audioSourceFormatHint = audioSourceFormatHint self.movieProcessingContext = movieProcessingContext - self.keepLastPixelBuffer = keepLastPixelBuffer } - public func startRecording(sync: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { + public func startRecording(sync: Bool = false, manualControlState: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. @@ -195,7 +201,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { throw MovieOutputError.pixelBufferPoolNilError } - self.isRecording = true + if !manualControlState { + self.state = .writing + } else { + self.state = .idle + } print("MovieOutput started writing") @@ -218,19 +228,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func startCachingWithoutWriting(duration: TimeInterval) { print("MovieOutput starting caching. duration:\(duration)") - isCaching = true + state = .caching cacheBuffersDuration = duration } public func finishCachingAndStartWriting() { print("MovieOutput finish caching and start writing. cached buffer: videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") - isCaching = false + state = .writing } public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { MovieOutput.assetWriterQueue.async { self._cleanBufferCaches() - guard self.isRecording, + guard self.state == .writing, self.assetWriter.status == .writing else { completionCallback?() return @@ -239,7 +249,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true - self.isRecording = false + self.state = .finished if let lastFrame = self.previousFrameTime { // Resolve black frames at the end. Without this the end timestamp of the session's samples could be either video or audio. @@ -261,19 +271,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func cancelRecording(_ completionCallback:(() -> Void)? = nil) { MovieOutput.assetWriterQueue.async { [weak self] in - self?._cleanBufferCaches() - guard let self = self, self.isRecording, - self.assetWriter.status == .writing else { - completionCallback?() - return + guard let self = self else { + completionCallback?() + return } - + self._cleanBufferCaches() + self.state = .canceled self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true - - self.isRecording = false - - self.assetWriter.cancelWriting() + if self.assetWriter.status == .writing { + self.assetWriter.cancelWriting() + } completionCallback?() print("MovieOutput cancel writing") } @@ -290,167 +298,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); - let cache = { [weak self] in - guard let self = self else { return } - // Discard first n frames - if self.dropFirstFrames > 0 { - self.dropFirstFrames -= 1 - self.synchronizedEncodingDebugPrint("Drop one frame. 
Left dropFirstFrames:\(self.dropFirstFrames)") - return - } - guard self.isRecording, - self.assetWriter.status == .writing, - !self.videoEncodingIsFinished else { - self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") - return - } - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - if self.keepLastPixelBuffer { - self.pixelBuffer = nil - } - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) - guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { - print("[Caching] WARNING: Unable to create pixel buffer, dropping frame") - return - } - do { - try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - self.videoPixelBufferCache.append((self.pixelBuffer!, frameTime)) - print("[Caching] appended new buffer at:\(frameTime.seconds)") - while let firstBufferTime = self.videoPixelBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > self.cacheBuffersDuration { - let firstBuffer = self.videoPixelBufferCache.removeFirst() - print("[Caching] caching video duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") - } - } catch { - print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") - } - - CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !self.keepLastPixelBuffer { - self.pixelBuffer = nil - } - - sharedImageProcessingContext.runOperationAsynchronously { - framebuffer.unlock() - } - } - let work = { [weak self] in - guard let self = self else { return } - // Discard first n frames - if self.dropFirstFrames > 0 { - self.dropFirstFrames -= 1 - self.synchronizedEncodingDebugPrint("Drop one frame. 
Left dropFirstFrames:\(self.dropFirstFrames)") - return - } - - guard self.isRecording, - self.assetWriter.status == .writing, - !self.videoEncodingIsFinished else { - self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") - return - } - - // Ignore still images and other non-video updates (do I still need this?) - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard (frameTime != self.previousFrameTime) else { return } - - if (self.previousFrameTime == nil) { - // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. - let startFrameTime = self.videoPixelBufferCache.first?.1 ?? frameTime - self.assetWriter.startSession(atSourceTime: startFrameTime) - self.startFrameTime = startFrameTime - self.delegate?.movieOutputDidStartWriting(self, at: startFrameTime) - } - - self.previousFrameTime = frameTime - - guard (self.assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { - print("WARNING: Had to drop a frame at time \(frameTime)") - return - } - - while(!self.assetWriterVideoInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.videoEncodingIsFinished) { - self.synchronizedEncodingDebugPrint("Video waiting...") - // Better to poll isReadyForMoreMediaData often since when it does become true - // we don't want to risk letting framebuffers pile up in between poll intervals. 
- usleep(100000) // 0.1 seconds - if self.markIsFinishedAfterProcessing { - self.synchronizedEncodingDebugPrint("set videoEncodingIsFinished to true after processing") - self.markIsFinishedAfterProcessing = false - self.videoEncodingIsFinished = true - } - } - - if self.keepLastPixelBuffer { - self.pixelBuffer = nil - } - - func __renderBuffer() { - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) - guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { - print("WARNING: Unable to create pixel buffer, dropping frame") - return - } - do { - try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) - self.videoPixelBufferCache.append((self.pixelBuffer!, frameTime)) - self.synchronizedEncodingDebugPrint("Process frame output. Time:\(CMTimeGetSeconds(frameTime))") - } - catch { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") - } - - CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !self.keepLastPixelBuffer { - self.pixelBuffer = nil - } - - sharedImageProcessingContext.runOperationAsynchronously { - framebuffer.unlock() - } - } - - func __appendBuffers() { - var appendedBufferCount = 0 - do { - try NSObject.catchException { - // Drain all cached buffers at first - if !self.videoPixelBufferCache.isEmpty { - for (i, (buffer, time)) in self.videoPixelBufferCache.enumerated() { - print("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") - if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") - break - } - appendedBufferCount += 1 - if(self.synchronizedEncodingDebug) { - self.totalFramesAppended += 1 - } - } - } - } - } - catch { - print("WARNING: Trouble appending 
pixel buffer at time: \(frameTime) \(error)") - } - self.videoPixelBufferCache.removeFirst(appendedBufferCount) - } - - if !self.videoPixelBufferCache.isEmpty { - // If videoPixelBufferCache has too much buffers, processing current buffers to ease the burden of videoInput, or it will crash - __appendBuffers() - __renderBuffer() - __appendBuffers() + if self?.state == .caching { + self?._renderAndCache(framebuffer: framebuffer) } else { - __renderBuffer() - __appendBuffers() + self?._processPixelBufferCache(framebuffer: framebuffer) } - - if self.videoEncodingIsFinished { - self.assetWriterVideoInput.markAsFinished() + sharedImageProcessingContext.runOperationAsynchronously { + framebuffer.unlock() } } @@ -458,14 +313,134 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. - movieProcessingContext.runOperationAsynchronously(isCaching ? cache : work) + movieProcessingContext.runOperationAsynchronously(work) } else { // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. - movieProcessingContext.runOperationSynchronously(isCaching ? cache : work) + movieProcessingContext.runOperationSynchronously(work) + } + } + + private func _renderAndCache(framebuffer: Framebuffer) { + // Discard first n frames + if dropFirstFrames > 0 { + dropFirstFrames -= 1 + synchronizedEncodingDebugPrint("Drop one frame. 
Left dropFirstFrames:\(dropFirstFrames)") + return + } + guard state == .caching || state == .writing, assetWriter.status == .writing, !videoEncodingIsFinished else { + synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + pixelBuffer = nil + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) + guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { + print("[Caching] WARNING: Unable to create pixel buffer, dropping frame") + return + } + do { + try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) + videoPixelBufferCache.append((pixelBuffer!, frameTime)) + print("[Caching] appended new buffer at:\(frameTime.seconds)") + while let firstBufferTime = videoPixelBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > cacheBuffersDuration { + let firstBuffer = videoPixelBufferCache.removeFirst() + print("[Caching] caching video duration reach up to:\(cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") + } + } catch { + print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") + } + + CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + } + + private func _processPixelBufferCache(framebuffer: Framebuffer) { + // Discard first n frames + if dropFirstFrames > 0 { + dropFirstFrames -= 1 + synchronizedEncodingDebugPrint("Drop one frame. Left dropFirstFrames:\(self.dropFirstFrames)") + return + } + + guard state == .caching || state == .writing, assetWriter.status == .writing, !videoEncodingIsFinished else { + synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } + + // Ignore still images and other non-video updates (do I still need this?) 
+ guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard (frameTime != previousFrameTime) else { return } + + if (previousFrameTime == nil) { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + let startFrameTime = videoPixelBufferCache.first?.1 ?? frameTime + assetWriter.startSession(atSourceTime: startFrameTime) + self.startFrameTime = startFrameTime + delegate?.movieOutputDidStartWriting(self, at: startFrameTime) + } + + previousFrameTime = frameTime + + guard (assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { + print("WARNING: Had to drop a frame at time \(frameTime)") + return + } + + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + synchronizedEncodingDebugPrint("Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. 
+ usleep(100000) // 0.1 seconds + if markIsFinishedAfterProcessing { + synchronizedEncodingDebugPrint("set videoEncodingIsFinished to true after processing") + markIsFinishedAfterProcessing = false + videoEncodingIsFinished = true + } + } + + if !videoPixelBufferCache.isEmpty { + // If videoPixelBufferCache has too much buffers, processing current buffers to ease the burden of videoInput, or it will crash + _appendPixelBuffersFromCache() + _renderAndCache(framebuffer: framebuffer) + _appendPixelBuffersFromCache() + } else { + _renderAndCache(framebuffer: framebuffer) + _appendPixelBuffersFromCache() + } + + if videoEncodingIsFinished { + assetWriterVideoInput.markAsFinished() + } + } + + private func _appendPixelBuffersFromCache() { + var appendedBufferCount = 0 + do { + try NSObject.catchException { + // Drain all cached buffers at first + if !self.videoPixelBufferCache.isEmpty { + for (i, (buffer, time)) in self.videoPixelBufferCache.enumerated() { + print("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") + if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { + print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") + break + } + appendedBufferCount += 1 + if(self.synchronizedEncodingDebug) { + self.totalFramesAppended += 1 + } + } + } + } + } + catch { + print("WARNING: Trouble appending pixel buffer \(error)") } + videoPixelBufferCache.removeFirst(appendedBufferCount) } func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { @@ -498,7 +473,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Append buffer directly from CMSampleBuffer public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let cache = { - guard self.isRecording, + guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, 
!self.videoEncodingIsFinished else { self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") @@ -526,7 +501,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - guard self.isRecording, + guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, !self.videoEncodingIsFinished else { self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") @@ -592,10 +567,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if(self.encodingLiveVideo) { - movieProcessingContext.runOperationSynchronously(isCaching ? cache : work) + movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) } else { - (isCaching ? cache : work)() + (state == .caching ? cache : work)() } } @@ -614,7 +589,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let cache = { - guard self.isRecording, + guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, !self.audioEncodingIsFinished else { self.synchronizedEncodingDebugPrint("Guard fell through, dropping audio sample") @@ -632,7 +607,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } let work = { - guard self.isRecording, + guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, !self.audioEncodingIsFinished, let assetWriterAudioInput = self.assetWriterAudioInput else { @@ -690,10 +665,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if(self.encodingLiveVideo) { - movieProcessingContext.runOperationSynchronously(isCaching ? cache : work) + movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) } else { - (isCaching ? cache : work)() + (state == .caching ? 
cache : work)() } } From 64f8b575a721ec3b6ffacfb224b4b6ecb7e0a2ec Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 13 Jan 2020 17:49:49 +0800 Subject: [PATCH 207/332] chore: disable debug log and fix possible leak --- framework/Source/iOS/MovieOutput.swift | 54 ++++++++++++++------------ 1 file changed, 29 insertions(+), 25 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 7187d552..1a80487b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -164,8 +164,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. - let block = { () -> Void in + let block = { [weak self] () -> Void in do { + guard let self = self else { return } guard self.assetWriter.status != .cancelled else { throw MovieOutputError.startWritingError(assetWriterError: nil) } @@ -211,7 +212,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { completionCallback?(true, nil) } catch { - self.assetWriter.cancelWriting() + self?.assetWriter.cancelWriting() print("MovieOutput failed to start writing. error:\(error)") @@ -238,9 +239,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { - MovieOutput.assetWriterQueue.async { - self._cleanBufferCaches() - guard self.state == .writing, + MovieOutput.assetWriterQueue.async { [weak self] in + self?._cleanBufferCaches() + guard let self = self, self.state == .writing, self.assetWriter.status == .writing else { completionCallback?() return @@ -344,10 +345,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) videoPixelBufferCache.append((pixelBuffer!, frameTime)) - print("[Caching] appended new buffer at:\(frameTime.seconds)") +// print("[Caching] appended new buffer at:\(frameTime.seconds)") while let firstBufferTime = videoPixelBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > cacheBuffersDuration { - let firstBuffer = videoPixelBufferCache.removeFirst() - print("[Caching] caching video duration reach up to:\(cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") + _ = videoPixelBufferCache.removeFirst() +// print("[Caching] caching video duration reach up to:\(cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") } } catch { print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") @@ -360,7 +361,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Discard first n frames if dropFirstFrames > 0 { dropFirstFrames -= 1 - synchronizedEncodingDebugPrint("Drop one frame. Left dropFirstFrames:\(self.dropFirstFrames)") + synchronizedEncodingDebugPrint("Drop one frame. 
Left dropFirstFrames:\(dropFirstFrames)") return } @@ -385,7 +386,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { previousFrameTime = frameTime - guard (assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { + guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { print("WARNING: Had to drop a frame at time \(frameTime)") return } @@ -423,8 +424,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { try NSObject.catchException { // Drain all cached buffers at first if !self.videoPixelBufferCache.isEmpty { - for (i, (buffer, time)) in self.videoPixelBufferCache.enumerated() { - print("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") + for (_, (buffer, time)) in self.videoPixelBufferCache.enumerated() { +// debugPrint("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") break @@ -472,7 +473,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Append buffer directly from CMSampleBuffer public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { - let cache = { + let cache = { [weak self] in + guard let self = self else { return } guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, !self.videoEncodingIsFinished else { @@ -483,24 +485,24 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) self.videoSampleBufferCache.add(sampleBuffer) - print("[Caching] cache new video sample buffer at:\(frameTime.seconds)") +// debugPrint("[Caching] cache new video sample buffer at:\(frameTime.seconds)") if 
self.videoSampleBufferCache.count >= 13 && self.encodingLiveVideo { // Be careful of caching too much sample buffers from camera captureOutput. iOS has a hard limit of camera buffer count: 15. - print("WARNING: almost reach system buffer limit: \(self.videoSampleBufferCache.count)/15") +// debugPrint("WARNING: almost reach system buffer limit: \(self.videoSampleBufferCache.count)/15") } while let firstBuffer = self.videoSampleBufferCache.firstObject, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer)).seconds > self.cacheBuffersDuration { self.videoSampleBufferCache.removeObject(at: 0) - print("[Caching] caching video duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer).seconds)") +// debugPrint("[Caching] caching video duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer).seconds)") } } - let work = { + let work = { [weak self] in defer { if(shouldInvalidateSampleWhenDone) { CMSampleBufferInvalidate(sampleBuffer) } } - + guard let self = self else { return } guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, !self.videoEncodingIsFinished else { @@ -544,7 +546,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { for (i, sampleBufferObject) in self.videoSampleBufferCache.enumerated() { let sampleBuffer = sampleBufferObject as! 
CMSampleBuffer let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - print("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") + debugPrint("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") break @@ -566,7 +568,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.videoSampleBufferCache.removeObjects(in: NSRange(0.. self.cacheBuffersDuration { _ = self.audioSampleBufferCache.removeFirst() - print("[Caching] caching audio duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer).seconds)") +// debugPrint("[Caching] caching audio duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer).seconds)") } } - let work = { + let work = { [weak self] in + guard let self = self else { return } guard self.state == .caching || self.state == .writing, self.assetWriter.status == .writing, !self.audioEncodingIsFinished, @@ -645,8 +649,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var appendedBufferCount = 0 do { try NSObject.catchException { - for (i, audioBuffer) in self.audioSampleBufferCache.enumerated() { - print("appending audio buffer \(i+1)/\(self.audioSampleBufferCache.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") + for (_, audioBuffer) in self.audioSampleBufferCache.enumerated() { +// debugPrint("[Caching] appending audio buffer \(i+1)/\(self.audioSampleBufferCache.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") if (!assetWriterAudioInput.append(audioBuffer)) { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") break From 
6e3d11fe2bde54f1658fdb89e7a6a65d9cb41504 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 14 Jan 2020 23:11:12 +0800 Subject: [PATCH 208/332] fix(MovieOutput): fix unexpected retain by Objc-c exception and result in memory leak --- framework/Source/iOS/MovieOutput.swift | 119 ++++++++++++++----------- 1 file changed, 67 insertions(+), 52 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 1a80487b..9179023c 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -310,7 +310,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - if(self.encodingLiveVideo) { + if encodingLiveVideo { // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. 
@@ -421,24 +421,30 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private func _appendPixelBuffersFromCache() { var appendedBufferCount = 0 do { - try NSObject.catchException { - // Drain all cached buffers at first - if !self.videoPixelBufferCache.isEmpty { - for (_, (buffer, time)) in self.videoPixelBufferCache.enumerated() { -// debugPrint("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") - if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { - print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") - break - } - appendedBufferCount += 1 - if(self.synchronizedEncodingDebug) { - self.totalFramesAppended += 1 - } + // Drain all cached buffers at first + if !videoPixelBufferCache.isEmpty { + for (_, (buffer, time)) in videoPixelBufferCache.enumerated() { + // debugPrint("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") + if !assetWriterVideoInput.isReadyForMoreMediaData { + // Avoid error when calling bufferInput.append + print("WARNING: video input is not ready at time: \(time))") + break + } + let bufferInput = assetWriterPixelBufferInput + var appendResult = false + // NOTE: when NSException was triggered within NSObject.catchException, the object inside the block seems cannot be released correctly, so be careful not to trigger error, or directly use "self." 
+ try NSObject.catchException { + appendResult = bufferInput.append(buffer, withPresentationTime: time) + } + if !appendResult { + print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") + break } + appendedBufferCount += 1 + self.totalFramesAppended += 1 } } - } - catch { + } catch { print("WARNING: Trouble appending pixel buffer \(error)") } videoPixelBufferCache.removeFirst(appendedBufferCount) @@ -541,28 +547,32 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var appendedBufferCount = 0 do { - try NSObject.catchException { - // Drain all cached buffers at first - for (i, sampleBufferObject) in self.videoSampleBufferCache.enumerated() { - let sampleBuffer = sampleBufferObject as! CMSampleBuffer - let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - debugPrint("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") - guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") - break - } - if (!self.assetWriterPixelBufferInput.append(buffer, withPresentationTime: time)) { - print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") - break - } - appendedBufferCount += 1 - if(self.synchronizedEncodingDebug) { - self.totalFramesAppended += 1 - } + // Drain all cached buffers at first + for (i, sampleBufferObject) in self.videoSampleBufferCache.enumerated() { + let sampleBuffer = sampleBufferObject as! 
CMSampleBuffer + let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + debugPrint("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") + guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") + break + } + if !self.assetWriterVideoInput.isReadyForMoreMediaData { + print("WARNING: video input is not ready at time: \(time))") + break + } + let bufferInput = self.assetWriterPixelBufferInput + var appendResult = false + try NSObject.catchException { + appendResult = bufferInput.append(buffer, withPresentationTime: time) } + if (!appendResult) { + print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") + break + } + appendedBufferCount += 1 + self.totalFramesAppended += 1 } - } - catch { + } catch { print("WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") } self.videoSampleBufferCache.removeObjects(in: NSRange(0.. 
Date: Tue, 14 Jan 2020 23:27:25 +0800 Subject: [PATCH 209/332] improve(MovieOutput): make sure all buffers are drained when finished recording --- framework/Source/iOS/MovieOutput.swift | 169 ++++++++++++++----------- 1 file changed, 98 insertions(+), 71 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 9179023c..4d5abf16 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -86,6 +86,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var videoSampleBufferCache = NSMutableArray() public private(set) var audioSampleBufferCache = [CMSampleBuffer]() public private(set) var cacheBuffersDuration: TimeInterval = 0 + var shouldInvalidateAudioSampleWhenDone = false var synchronizedEncodingDebug = false public private(set) var totalFramesAppended:Int = 0 @@ -221,9 +222,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if sync { - block() + _writerSync(operation: block) } else { - MovieOutput.assetWriterQueue.async(execute: block) + _writerAsync(operation: block) } } @@ -239,8 +240,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func finishRecording(_ completionCallback:(() -> Void)? = nil) { - MovieOutput.assetWriterQueue.async { [weak self] in - self?._cleanBufferCaches() + _writerAsync { [weak self] in + self?._cleanBufferCaches(shouldAppend: true) guard let self = self, self.state == .writing, self.assetWriter.status == .writing else { completionCallback?() @@ -271,12 +272,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func cancelRecording(_ completionCallback:(() -> Void)? 
= nil) { - MovieOutput.assetWriterQueue.async { [weak self] in + _writerAsync { [weak self] in + self?._cleanBufferCaches(shouldAppend: false) guard let self = self else { completionCallback?() return } - self._cleanBufferCaches() self.state = .canceled self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true @@ -288,11 +289,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - private func _cleanBufferCaches() { - movieProcessingContext.runOperationAsynchronously { [weak self] in - self?.videoPixelBufferCache.removeAll() - self?.videoSampleBufferCache.removeAllObjects() - self?.audioSampleBufferCache.removeAll() + private func _cleanBufferCaches(shouldAppend: Bool) { + print("[Caching] Drain all buffers videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + movieProcessingContext.runOperationSynchronously { + if shouldAppend { + self._appendPixelBuffersFromCache() + self._appendAudioBuffersFromCache() + self._appendVideoSampleBuffersFromCache() + } + self.videoPixelBufferCache.removeAll() + self.videoSampleBufferCache.removeAllObjects() + self.audioSampleBufferCache.removeAll() } } @@ -544,38 +551,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // we don't want to risk letting framebuffers pile up in between poll intervals. usleep(100000) // 0.1 seconds } - - var appendedBufferCount = 0 - do { - // Drain all cached buffers at first - for (i, sampleBufferObject) in self.videoSampleBufferCache.enumerated() { - let sampleBuffer = sampleBufferObject as! 
CMSampleBuffer - let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - debugPrint("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") - guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") - break - } - if !self.assetWriterVideoInput.isReadyForMoreMediaData { - print("WARNING: video input is not ready at time: \(time))") - break - } - let bufferInput = self.assetWriterPixelBufferInput - var appendResult = false - try NSObject.catchException { - appendResult = bufferInput.append(buffer, withPresentationTime: time) - } - if (!appendResult) { - print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") - break - } - appendedBufferCount += 1 - self.totalFramesAppended += 1 - } - } catch { - print("WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") - } - self.videoSampleBufferCache.removeObjects(in: NSRange(0.. 
Void) { + MovieOutput.assetWriterQueue.async { [weak self] in + self?.movieProcessingContext.runOperationSynchronously(operation) + } + } + + private func _writerSync(operation: @escaping () -> Void) { + MovieOutput.assetWriterQueue.sync { [weak self] in + self?.movieProcessingContext.runOperationSynchronously(operation) + } + } } From 56e837d3bbe9c74fb17fc0260168175b1fa5c5da Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 15 Jan 2020 13:02:07 +0800 Subject: [PATCH 210/332] fix: possible crash when quickly change filters chain --- framework/Source/Pipeline.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 1dd80740..f9efa2e2 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -232,6 +232,7 @@ public class ImageRelay: ImageProcessingOperation { } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + guard sources.sources.count > 0 else { return } sources.sources[0]?.transmitPreviousImage(to:self, atIndex:0) } From 6fb67589f920f18456b6e407833a86b4800ada2b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 16 Jan 2020 12:39:00 +0800 Subject: [PATCH 211/332] improve(MoviePlayer): retry playing when inconsistent state detected --- framework/Source/iOS/MoviePlayer.swift | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 25ebbe36..1d17f325 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -97,6 +97,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return false } private var didTriggerEndTimeObserver = false + private var retryPlaying = false public override init() { print("movie player init") @@ -527,6 +528,16 @@ private extension MoviePlayer { } @objc func displayLinkCallback(displayLink: CADisplayLink) { + if !retryPlaying && isPlaying && items().isEmpty { + print("Items is empty when 
playing. Retry playing") + retryPlaying = true + replayLastItem() + } else if !items().isEmpty { + if retryPlaying { + retryPlaying = false + print("Resume playing succeed") + } + } guard currentItem?.status == .readyToPlay else { return } let playTime = currentTime() guard playTime.seconds > 0 else { return } From 74e41871b83b711eda608eeaf2c980715cc133df Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 16 Jan 2020 17:05:37 +0800 Subject: [PATCH 212/332] improve(MovieOutput): exposure url property --- framework/Source/iOS/MovieOutput.swift | 2 ++ 1 file changed, 2 insertions(+) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 4d5abf16..ee98642d 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -54,6 +54,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public weak var delegate: MovieOutputDelegate? + public let url: URL private let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? @@ -104,6 +105,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? 
= nil) throws { print("movie output init \(URL)") + self.url = URL imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup let movieProcessingContext = OpenGLContext() From 23b997ebaadf2b461c32cbfd84653b02db163379 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 21 Jan 2020 19:34:53 +0800 Subject: [PATCH 213/332] fix(MovieOutput): fix exception when finished writing --- framework/Source/iOS/MovieOutput.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index ee98642d..d6f845ed 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -428,6 +428,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } private func _appendPixelBuffersFromCache() { + guard state == .writing, assetWriter.status == .writing else { return } var appendedBufferCount = 0 do { // Drain all cached buffers at first @@ -564,6 +565,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } private func _appendVideoSampleBuffersFromCache() { + guard state == .writing, assetWriter.status == .writing else { return } var appendedBufferCount = 0 var time: CMTime = .zero do { @@ -679,7 +681,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } private func _appendAudioBuffersFromCache() { - guard let audioInput = assetWriterAudioInput else { return } + guard let audioInput = assetWriterAudioInput, state == .writing, assetWriter.status == .writing else { return } var appendedBufferCount = 0 do { for (_, audioBuffer) in audioSampleBufferCache.enumerated() { From 3a207ad984570c46bf73da46de028ef965e0c823 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 4 Feb 2020 15:43:54 +0800 Subject: [PATCH 214/332] chore(MovieOutput): improve logging --- framework/Source/iOS/MovieOutput.swift | 87 +++++++++++++------------- 1 file changed, 43 insertions(+), 44 deletions(-) diff --git 
a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index d6f845ed..45fc5d4b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -283,14 +283,22 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.state = .canceled self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true + print("MovieOutput cancel writing, state:\(self.assetWriter.status.rawValue)") if self.assetWriter.status == .writing { self.assetWriter.cancelWriting() } completionCallback?() - print("MovieOutput cancel writing") } } + private func _shouldProcessVideoBuffer() -> Bool { + guard state == .caching || state == .writing, assetWriter.status == .writing, !videoEncodingIsFinished else { + print("Guard fell through, dropping video frame. state:\(self.state) writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + return false + } + return true + } + private func _cleanBufferCaches(shouldAppend: Bool) { print("[Caching] Drain all buffers videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") movieProcessingContext.runOperationSynchronously { @@ -337,13 +345,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Discard first n frames if dropFirstFrames > 0 { dropFirstFrames -= 1 - synchronizedEncodingDebugPrint("Drop one frame. Left dropFirstFrames:\(dropFirstFrames)") - return - } - guard state == .caching || state == .writing, assetWriter.status == .writing, !videoEncodingIsFinished else { - synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + print("Drop one frame. 
Left dropFirstFrames:\(dropFirstFrames)") return } + guard _shouldProcessVideoBuffer() else { return } guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } pixelBuffer = nil let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) @@ -370,17 +375,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Discard first n frames if dropFirstFrames > 0 { dropFirstFrames -= 1 - synchronizedEncodingDebugPrint("Drop one frame. Left dropFirstFrames:\(dropFirstFrames)") + print("Drop one frame. Left dropFirstFrames:\(dropFirstFrames)") return } - guard state == .caching || state == .writing, assetWriter.status == .writing, !videoEncodingIsFinished else { - synchronizedEncodingDebugPrint("Guard fell through, dropping frame") - return - } + guard _shouldProcessVideoBuffer() else { return } // Ignore still images and other non-video updates (do I still need this?) - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("Cannot get timestamp from framebuffer, dropping frame") + return + } // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. 
guard (frameTime != previousFrameTime) else { return } @@ -491,12 +496,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { let cache = { [weak self] in guard let self = self else { return } - guard self.state == .caching || self.state == .writing, - self.assetWriter.status == .writing, - !self.videoEncodingIsFinished else { - self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") - return - } + guard self._shouldProcessVideoBuffer() else { return } let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) @@ -519,17 +519,15 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } guard let self = self else { return } - guard self.state == .caching || self.state == .writing, - self.assetWriter.status == .writing, - !self.videoEncodingIsFinished else { - self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") - return - } + guard self._shouldProcessVideoBuffer() else { return } let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. 
- guard (frameTime != self.previousFrameTime) else { return } + guard (frameTime != self.previousFrameTime) else { + print("Cannot get timestamp from framebuffer, dropping frame") + return + } self.videoSampleBufferCache.add(sampleBuffer) @@ -613,18 +611,23 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } + private func _shouldProcessAudioBuffer() -> Bool { + guard state == .caching || state == .writing, assetWriter.status == .writing, !audioEncodingIsFinished else { + print("Guard fell through, dropping audio sample, state:\(self.state) writer.state:\(self.assetWriter.status.rawValue) audioEncodingIsFinished:\(self.audioEncodingIsFinished)") + return false + } + return true + } + public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { shouldInvalidateAudioSampleWhenDone = shouldInvalidateSampleWhenDone let cache = { [weak self] in guard let self = self else { return } - guard self.state == .caching || self.state == .writing, - self.assetWriter.status == .writing, - !self.audioEncodingIsFinished else { - self.synchronizedEncodingDebugPrint("Guard fell through, dropping audio sample") - if shouldInvalidateSampleWhenDone { - CMSampleBufferInvalidate(sampleBuffer) - } - return + guard self._shouldProcessAudioBuffer() else { + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } + return } let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) self.audioSampleBufferCache.append(sampleBuffer) @@ -636,15 +639,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let work = { [weak self] in guard let self = self else { return } - guard self.state == .caching || self.state == .writing, - self.assetWriter.status == .writing, - !self.audioEncodingIsFinished, - let assetWriterAudioInput = self.assetWriterAudioInput else { - self.synchronizedEncodingDebugPrint("Guard fell through, dropping 
audio sample") - if shouldInvalidateSampleWhenDone { - CMSampleBufferInvalidate(sampleBuffer) - } - return + guard self._shouldProcessAudioBuffer(), let assetWriterAudioInput = self.assetWriterAudioInput else { + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } + return } let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) @@ -656,7 +655,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } while(!assetWriterAudioInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.audioEncodingIsFinished) { - self.synchronizedEncodingDebugPrint("Audio waiting...") + print("Audio waiting...") usleep(100000) if !assetWriterAudioInput.isReadyForMoreMediaData { self.synchronizedEncodingDebugPrint("Audio still not ready, skip this runloop...") @@ -665,7 +664,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } guard self.previousFrameTime != nil else { - self.synchronizedEncodingDebugPrint("Add audio sample to pending queue but first video frame is not ready yet. Time:\(CMTimeGetSeconds(currentSampleTime))") + print("Add audio sample to pending queue but first video frame is not ready yet. 
Time:\(CMTimeGetSeconds(currentSampleTime))") return } From f8274cbc1cf42083478feeb484ab7a51940695ae Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 11 Feb 2020 15:09:51 +0800 Subject: [PATCH 215/332] improve(movieoutput): add more log for dropping frames --- framework/Source/iOS/MovieOutput.swift | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 45fc5d4b..b21c9c4f 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -316,6 +316,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); + if previousFrameTime == nil { + debugPrint("starting process new framebuffer when previousFrameTime == nil") + } + let work = { [weak self] in if self?.state == .caching { self?._renderAndCache(framebuffer: framebuffer) @@ -349,7 +353,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return } guard _shouldProcessVideoBuffer() else { return } - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("Cannot get timestamp from framebuffer, dropping frame") + return + } pixelBuffer = nil let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { @@ -372,6 +379,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } private func _processPixelBufferCache(framebuffer: Framebuffer) { + if previousFrameTime == nil { + debugPrint("Got a new framebuffer when previousFrameTime is nil") + } // Discard first n frames if dropFirstFrames > 0 { dropFirstFrames -= 1 @@ -388,13 +398,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { 
} // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard (frameTime != previousFrameTime) else { return } + guard (frameTime != previousFrameTime) else { + print("WARNING: frameTime is as same as previousFrameTIme") + return + } if (previousFrameTime == nil) { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. let startFrameTime = videoPixelBufferCache.first?.1 ?? frameTime assetWriter.startSession(atSourceTime: startFrameTime) self.startFrameTime = startFrameTime + print("did start writing at:\(startFrameTime.seconds)") delegate?.movieOutputDidStartWriting(self, at: startFrameTime) } From 125458fc1e223279632f74144d9c7f767e309526 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 11 Feb 2020 19:10:12 +0800 Subject: [PATCH 216/332] improve(movieoutput): keep only useful log --- framework/Source/iOS/MovieOutput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index b21c9c4f..3eb056db 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -316,7 +316,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish(); - if previousFrameTime == nil { + if previousFrameTime == nil && videoSampleBufferCache.count <= 0 && videoPixelBufferCache.isEmpty && (state == .caching || state == .writing) { debugPrint("starting process new framebuffer when previousFrameTime == nil") } From 36eac01f7c5a0f3d779c65884c4b89bc82375fcf Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Mon, 24 Feb 2020 11:33:18 +0800 Subject: [PATCH 217/332] fix(transcoding): fix transcode only task not emit progress. 
--- framework/Source/iOS/MovieInput.swift | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 6cd777cc..43f87dfc 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -373,11 +373,6 @@ public class MovieInput: ImageSource { var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) currentTime = currentSampleTime - if transcodingOnly, let movieOutput = synchronizedMovieOutput { - movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) - return - } - var duration = asset.duration // Only used for the progress block so its acuracy is not critical if let startTime = startTime { // Make sure our samples start at kCMTimeZero if the video was started midway. @@ -389,13 +384,18 @@ public class MovieInput: ImageSource { } } + progress?(currentSampleTime.seconds/duration.seconds) + + if transcodingOnly, let movieOutput = synchronizedMovieOutput { + movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) + return + } + // NOTE: When calculating frame pre second, floating point maybe rounded, so we have to add tolerance manually if let fps = maxFPS, let currentTime = currentTime, (currentSampleTime.seconds - currentTime.seconds) < 1 / Double(fps) - 0.0000001 { return } - progress?(currentSampleTime.seconds/duration.seconds) - if synchronizedMovieOutput != nil { // For synchrozied transcoding, separate AVAssetReader thread and OpenGL thread to improve performance sharedImageProcessingContext.runOperationAsynchronously { [weak self] in From fc40ff0abf8adde947744082632ef0797ec62293 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 25 Feb 2020 01:32:45 +0800 Subject: [PATCH 218/332] feat(lut): support CPU LUT filter when transcoding --- framework/Source/iOS/CILookupFilter.swift | 145 ++++++++++++++++++++++ framework/Source/iOS/MovieOutput.swift | 
43 ++++++- 2 files changed, 187 insertions(+), 1 deletion(-) create mode 100644 framework/Source/iOS/CILookupFilter.swift diff --git a/framework/Source/iOS/CILookupFilter.swift b/framework/Source/iOS/CILookupFilter.swift new file mode 100644 index 00000000..32321e2b --- /dev/null +++ b/framework/Source/iOS/CILookupFilter.swift @@ -0,0 +1,145 @@ +// +// CILookupFilter.swift +// DayCam +// +// Created by 陈品霖 on 2020/2/23. +// Copyright © 2020 rocry. All rights reserved. +// + +import Foundation + +public class CILookupFilter { + private var lutFilter: CIFilter? + public private(set) var intensity: Double? + public private(set) var brightnessFactor: Double? + // Use "ColorMatrix(Alpha) + Composite" filters for color LUT + private var alphaFilter: CIFilter? + private var compositeFilter: CIFilter? + private lazy var alphaColorMatrix = [CGFloat]() + // Use "ColorControl(Brightness)" filter for black and white LUT + private var brightnessFilter: CIFilter? + + init(lutImage: UIImage, intensity: Double? = nil, brightnessFactor: Double? = nil) { + self.intensity = intensity + self.brightnessFactor = brightnessFactor + lutFilter = CIFilter.filter(with: lutImage) + if let intensity = intensity { + if let factor = brightnessFactor { + brightnessFilter = CIFilter(name: "CIColorControls") + brightnessFilter?.setDefaults() + let adjustedBrightness = -factor + factor * intensity + brightnessFilter?.setValue(NSNumber(value: adjustedBrightness), forKey: kCIInputBrightnessKey) + } else { + alphaColorMatrix = [0, 0, 0, CGFloat(intensity)] + alphaFilter = CIFilter(name: "CIColorMatrix") + alphaFilter?.setDefaults() + alphaFilter?.setValue(CIVector(values: &alphaColorMatrix, count: 4), forKey: "inputAVector") + + compositeFilter = CIFilter(name: "CISourceOverCompositing") + compositeFilter?.setDefaults() + } + } + } + + func applyFilter(on image: CIImage) -> CIImage? 
{ + lutFilter?.setValue(image, forKey: kCIInputImageKey) + if intensity == nil { + return lutFilter?.outputImage + } else { + if brightnessFactor != nil { + brightnessFilter?.setValue(lutFilter?.outputImage, forKey: kCIInputImageKey) + return brightnessFilter?.outputImage + } else { + alphaFilter?.setValue(lutFilter?.outputImage, forKey: kCIInputImageKey) + compositeFilter?.setValue(alphaFilter?.outputImage, forKey: kCIInputImageKey) + compositeFilter?.setValue(image, forKey: kCIInputBackgroundImageKey) + return compositeFilter?.outputImage + } + } + } +} + +public extension CIFilter { + static func filter(with lutUIImage: UIImage) -> CIFilter? { + guard let lutCGImage = lutUIImage.cgImage else { + print("ERROR: Invalid colorLUT"); + return nil + } + let size = 64 + let lutWidth = lutCGImage.width + let lutHeight = lutCGImage.height + let rowCount = lutHeight / size + let columnCount = lutWidth / size + + guard lutWidth % size == 0 && lutHeight % size == 0 && rowCount * columnCount == size else { + print("ERROR: Invalid colorLUT image size, width:\(lutWidth) height:\(lutHeight)"); + return nil + } + + guard let bitmap = getBytesFromImage(image: lutUIImage) else { + print("ERROR: Cannot get byte from image") + return nil + } + + let floatSize = MemoryLayout.size + let cubeData = UnsafeMutablePointer.allocate(capacity: size * size * size * 4 * floatSize) + var z = 0 + var bitmapOffset = 0 + + for _ in 0 ..< rowCount { + for y in 0 ..< size { + let tmp = z + for _ in 0 ..< columnCount { + for x in 0 ..< size { + let alpha = Float(bitmap[bitmapOffset]) / 255.0 + let red = Float(bitmap[bitmapOffset+1]) / 255.0 + let green = Float(bitmap[bitmapOffset+2]) / 255.0 + let blue = Float(bitmap[bitmapOffset+3]) / 255.0 + + let dataOffset = (z * size * size + y * size + x) * 4 + + cubeData[dataOffset + 3] = alpha + cubeData[dataOffset + 2] = red + cubeData[dataOffset + 1] = green + cubeData[dataOffset + 0] = blue + bitmapOffset += 4 + } + z += 1 + } + z = tmp + } + z += 
columnCount + } + + // create CIColorCube Filter + let colorCubeData = NSData(bytesNoCopy: cubeData, length: size * size * size * 4 * floatSize, freeWhenDone: true) + guard let filter = CIFilter(name: "CIColorCube") else { + print("ERROR: Cannot get CIColorCube filter") + return nil + } + filter.setValue(colorCubeData, forKey: "inputCubeData") + filter.setValue(size, forKey: "inputCubeDimension") + return filter + } + + static func getBytesFromImage(image: UIImage?) -> [UInt8]? { + var pixelValues: [UInt8]? + if let imageRef = image?.cgImage { + let width = Int(imageRef.width) + let height = Int(imageRef.height) + let bitsPerComponent = 8 + let bytesPerRow = width * 4 + let totalBytes = height * bytesPerRow + + let bitmapInfo = CGImageAlphaInfo.premultipliedLast.rawValue | CGBitmapInfo.byteOrder32Little.rawValue + let colorSpace = CGColorSpaceCreateDeviceRGB() + var intensities = [UInt8](repeating: 0, count: totalBytes) + + let contextRef = CGContext(data: &intensities, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo) + contextRef?.draw(imageRef, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(width), height: CGFloat(height))) + + pixelValues = intensities + } + return pixelValues + } +} diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 3eb056db..c4d3c673 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,4 +1,5 @@ import AVFoundation +import CoreImage public protocol AudioEncodingTarget { func activateAudioTrack() throws @@ -73,6 +74,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } } + private var ciFilter: CILookupFilter? + private var cpuCIContext: CIContext? public private(set) var pixelBuffer:CVPixelBuffer? 
= nil public var dropFirstFrames: Int = 0 public var waitUtilDataIsReadyForLiveVideo = false @@ -163,6 +166,37 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.movieProcessingContext = movieProcessingContext } + public func setupSoftwareLUTFilter(lutImage: UIImage, intensity: Double? = nil, brightnessFactor: Double? = nil, sync: Bool = true) { + let block: () -> () = { [weak self] in + if self?.cpuCIContext == nil { + let colorSpace = CGColorSpaceCreateDeviceRGB() + let options: [CIContextOption: AnyObject] = [ + .workingColorSpace: colorSpace, + .outputColorSpace : colorSpace, + .useSoftwareRenderer : NSNumber(value: true) + ] + self?.cpuCIContext = CIContext(options: options) + } + self?.ciFilter = CILookupFilter(lutImage: lutImage, intensity: intensity, brightnessFactor: brightnessFactor) + } + if sync { + sharedImageProcessingContext.runOperationSynchronously(block) + } else { + sharedImageProcessingContext.runOperationAsynchronously(block) + } + } + + public func cleanSoftwareFilter(sync: Bool = true) { + let block: () -> () = { [weak self] in + self?.ciFilter = nil + } + if sync { + sharedImageProcessingContext.runOperationSynchronously(block) + } else { + sharedImageProcessingContext.runOperationAsynchronously(block) + } + } + public func startRecording(sync: Bool = false, manualControlState: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. 
// If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) @@ -185,8 +219,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } print("MovieOutput starting writing...") var success = false + let assetWriter = self.assetWriter try NSObject.catchException { - success = self.assetWriter.startWriting() + success = assetWriter.startWriting() } if(!success) { @@ -594,6 +629,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { print("WARNING: video input is not ready at time: \(time))") break } + if let ciFilter = ciFilter { + let originalImage = CIImage(cvPixelBuffer: buffer) + if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { + ciContext.render(outputImage, to: buffer) + } + } let bufferInput = self.assetWriterPixelBufferInput var appendResult = false try NSObject.catchException { From ca7f85c59b8a4c4639a740d443a9c686fc626cec Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 26 Feb 2020 19:48:52 +0800 Subject: [PATCH 219/332] chore: add more player log --- framework/Source/iOS/MoviePlayer.swift | 28 +++++++++++++------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 1d17f325..ca03cb88 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -143,7 +143,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { remove(item) super.insert(item, after: afterItem) } - print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) itemsCount:\(items().count)") + print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") } override public func replaceCurrentItem(with 
item: AVPlayerItem?) { @@ -172,7 +172,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { super.replaceCurrentItem(with: item) } - print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) itemsCount:\(items().count)") + print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count) ") } public func replayLastItem() { @@ -183,23 +183,23 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { play() } - print("replay last item:\(playerItem)") + print("replay last item:\(playerItem) currentTime:\(currentTime().seconds)") } override public func remove(_ item: AVPlayerItem) { super.remove(item) - print("remove item:\(item)") + print("remove item:\(item) currentTime:\(currentTime().seconds)") } override public func removeAllItems() { _stopLoopingIfNeeded() super.removeAllItems() - print("remove all items") + print("remove all items currentTime:\(currentTime().seconds)") } override public func advanceToNextItem() { super.advanceToNextItem() - print("advance to next item") + print("advance to next item currentTime:\(currentTime().seconds)") } // MARK: - @@ -230,7 +230,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } isPlaying = true isProcessing = false - print("movie player start duration:\(String(describing: asset?.duration.seconds)) items:\(String(describing: items()))") + print("movie player start currentTime:\(currentTime().seconds) duration:\(String(describing: asset?.duration.seconds)) items:\(items())") _setupDisplayLinkIfNeeded() _resetTimeObservers() if shouldUseLooper { @@ -255,19 +255,19 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func resume() { isPlaying = true rate = playrate - print("movie player resume \(String(describing: asset))") + print("movie player resume 
currentTime:\(currentTime().seconds) \(String(describing: asset))") } override public func pause() { isPlaying = false guard rate != 0 else { return } - print("movie player pause \(String(describing: asset))") + print("movie player pause currentTime:\(currentTime().seconds) \(String(describing: asset))") super.pause() } public func stop() { pause() - print("movie player stop \(String(describing: asset))") + print("movie player stop currentTime:\(currentTime().seconds) \(String(describing: asset))") _timeObserversUpdate { [weak self] in self?.timeObserversQueue.removeAll() } @@ -470,12 +470,12 @@ private extension MoviePlayer { } func playerStatusDidChange() { - debugPrint("Player status change to:\(status.rawValue) asset:\(String(describing: asset))") + debugPrint("Player status change to:\(status.rawValue) asset:\(String(describing: asset)) currentTime:\(currentTime().seconds)") resumeIfNeeded() } func playerItemStatusDidChange(_ playerItem: AVPlayerItem) { - debugPrint("PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset)") + debugPrint("PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset) currentTime:\(currentTime().seconds)") if playerItem == currentItem { resumeIfNeeded() } @@ -569,7 +569,7 @@ private extension MoviePlayer { } @objc func playerDidPlayToEnd(notification: Notification) { - print("player did play to end. notification:\(notification) items:\(items())") + print("player did play to end. currentTime:\(currentTime().seconds) notification:\(notification) items:\(items())") guard (notification.object as? AVPlayerItem) == currentItem else { return } if needAddItemAfterDidEndNotify { DispatchQueue.main.async() { [weak self] in @@ -589,7 +589,7 @@ private extension MoviePlayer { } @objc func playerStalled(notification: Notification) { - print("player was stalled. notification:\(notification)") + print("player was stalled. 
currentTime:\(currentTime().seconds) notification:\(notification)") guard (notification.object as? AVPlayerItem) == currentItem else { return } } From 62c63ef905a470640af9d0d2c36b290a1f6d3d46 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 3 Mar 2020 23:52:05 +0800 Subject: [PATCH 220/332] improve(MoviePlayer): fix movieplayer pending items are not remove correctly, improve didPlayToEnd accuracy, and add more log --- framework/Source/iOS/MoviePlayer.swift | 70 +++++++++++++++++--------- 1 file changed, 47 insertions(+), 23 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index ca03cb88..46b6aba7 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -65,7 +65,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return asset.imageOrientation ?? .portrait } public var didPlayToEnd: Bool { - return currentTime() >= assetDuration + return currentItem?.currentTime() ?? .zero >= assetDuration } public var hasTarget: Bool { targets.count > 0 } @@ -98,16 +98,30 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } private var didTriggerEndTimeObserver = false private var retryPlaying = false + /// Return the current item. If currentItem was played to end, will return next one + public var actualCurrentItem: AVPlayerItem? 
{ + let playerItems = items() + guard playerItems.count > 0 else { return nil } + if didPlayToEnd { + if playerItems.count == 1 { + return nil + } else { + return playerItems[1] + } + } else { + return playerItems[0] + } + } public override init() { - print("movie player init") + print("[MoviePlayer] init") // Make sure player it intialized on the main thread, or it might cause KVO crash assert(Thread.isMainThread) super.init() } deinit { - print("movie player deinit \(String(describing: asset))") + print("[MoviePlayer] deinit \(String(describing: asset))") assert(observations.isEmpty, "observers must be removed before deinit") pause() displayLink?.invalidate() @@ -139,11 +153,18 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if shouldDelayAddPlayerItem { needAddItemAfterDidEndNotify = true pendingNewItems.append(item) + print("[MoviePlayer] pending insert. pendingNewItems:\(pendingNewItems)") } else { + // Append previous pending items at first + if needAddItemAfterDidEndNotify { + needAddItemAfterDidEndNotify = false + pendingNewItems.forEach { insert($0, after: nil) } + pendingNewItems.removeAll() + } remove(item) super.insert(item, after: afterItem) } - print("insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") + print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") } override public func replaceCurrentItem(with item: AVPlayerItem?) { @@ -169,10 +190,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if shouldDelayAddPlayerItem && item != nil { needAddItemAfterDidEndNotify = true pendingNewItems.append(item!) + print("[MoviePlayer] pending replace. 
pendingNewItems:\(pendingNewItems)") } else { super.replaceCurrentItem(with: item) } - print("replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count) ") + print("[MoviePlayer] replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count) ") } public func replayLastItem() { @@ -183,23 +205,25 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { play() } - print("replay last item:\(playerItem) currentTime:\(currentTime().seconds)") + print("[MoviePlayer] replay last item:\(playerItem) currentTime:\(currentTime().seconds)") } override public func remove(_ item: AVPlayerItem) { super.remove(item) - print("remove item:\(item) currentTime:\(currentTime().seconds)") + pendingNewItems.removeAll { $0 == item } + print("[MoviePlayer] remove item:\(item) currentTime:\(currentTime().seconds)") } override public func removeAllItems() { _stopLoopingIfNeeded() super.removeAllItems() - print("remove all items currentTime:\(currentTime().seconds)") + pendingNewItems.removeAll() + print("[MoviePlayer] remove all items currentTime:\(currentTime().seconds)") } override public func advanceToNextItem() { super.advanceToNextItem() - print("advance to next item currentTime:\(currentTime().seconds)") + print("[MoviePlayer] advance to next item currentTime:\(currentTime().seconds)") } // MARK: - @@ -225,12 +249,12 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { guard currentItem != nil else { // Sometime the player.items() seems still 0 even if insert was called, but it won't result in crash, just print a error log for information. - print("ERROR! player currentItem is nil") + print("[MoviePlayer] ERROR! 
player currentItem is nil") return } isPlaying = true isProcessing = false - print("movie player start currentTime:\(currentTime().seconds) duration:\(String(describing: asset?.duration.seconds)) items:\(items())") + print("[MoviePlayer] start currentTime:\(currentTime().seconds) duration:\(String(describing: asset?.duration.seconds)) items:\(items())") _setupDisplayLinkIfNeeded() _resetTimeObservers() if shouldUseLooper { @@ -293,7 +317,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } if assetDuration <= .zero { - print("cannot seek since assetDuration is 0. currentItem:\(String(describing: currentItem))") + print("[MoviePlayer] cannot seek since assetDuration is 0. currentItem:\(String(describing: currentItem))") } else { actuallySeekToTime() } @@ -397,7 +421,7 @@ private extension MoviePlayer { if loop, let looper = MoviePlayer.looperDict[self] { looper.disableLooping() MoviePlayer.looperDict[self] = nil - print("stop looping item)") + print("[MoviePlayer] stop looping item)") } } @@ -470,12 +494,12 @@ private extension MoviePlayer { } func playerStatusDidChange() { - debugPrint("Player status change to:\(status.rawValue) asset:\(String(describing: asset)) currentTime:\(currentTime().seconds)") + debugPrint("[MoviePlayer] Player status change to:\(status.rawValue) asset:\(String(describing: asset)) currentTime:\(currentTime().seconds)") resumeIfNeeded() } func playerItemStatusDidChange(_ playerItem: AVPlayerItem) { - debugPrint("PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset) currentTime:\(currentTime().seconds)") + debugPrint("[MoviePlayer] PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset) currentTime:\(currentTime().seconds)") if playerItem == currentItem { resumeIfNeeded() } @@ -496,13 +520,13 @@ private extension MoviePlayer { func 
_process(videoOutput: AVPlayerItemVideoOutput, at playTime: CMTime) { var timeForDisplay: CMTime = .zero guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: &timeForDisplay) else { - print("Failed to copy pixel buffer at time:\(playTime)") + print("[MoviePlayer] Failed to copy pixel buffer at time:\(playTime)") return } // Out of range when looping, skip process. So that it won't show unexpected frames. if loop && isPlaying && (timeForDisplay < actualStartTime || timeForDisplay >= actualEndTime) { - print("Skipped frame at time:\(timeForDisplay.seconds) is larger than range: [\(actualStartTime.seconds), \(actualEndTime.seconds)]") + print("[MoviePlayer] Skipped frame at time:\(timeForDisplay.seconds) is larger than range: [\(actualStartTime.seconds), \(actualEndTime.seconds)]") return } @@ -516,8 +540,8 @@ private extension MoviePlayer { if runBenchmark { let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) totalFrameTime += currentFrameTime - print("Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") - print("Current frame time :\(1000.0 * currentFrameTime) ms") + print("[MoviePlayer] Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") + print("[MoviePlayer] Current frame time :\(1000.0 * currentFrameTime) ms") } } @@ -529,13 +553,13 @@ private extension MoviePlayer { @objc func displayLinkCallback(displayLink: CADisplayLink) { if !retryPlaying && isPlaying && items().isEmpty { - print("Items is empty when playing. Retry playing") + print("[MoviePlayer] Items is empty when playing. 
Retry playing") retryPlaying = true replayLastItem() } else if !items().isEmpty { if retryPlaying { retryPlaying = false - print("Resume playing succeed") + print("[MoviePlayer] Resume playing succeed") } } guard currentItem?.status == .readyToPlay else { return } @@ -569,7 +593,7 @@ private extension MoviePlayer { } @objc func playerDidPlayToEnd(notification: Notification) { - print("player did play to end. currentTime:\(currentTime().seconds) notification:\(notification) items:\(items())") + print("[MoviePlayer] did play to end. currentTime:\(currentTime().seconds) notification:\(notification) items:\(items())") guard (notification.object as? AVPlayerItem) == currentItem else { return } if needAddItemAfterDidEndNotify { DispatchQueue.main.async() { [weak self] in @@ -589,7 +613,7 @@ private extension MoviePlayer { } @objc func playerStalled(notification: Notification) { - print("player was stalled. currentTime:\(currentTime().seconds) notification:\(notification)") + print("[MoviePlayer] player was stalled. currentTime:\(currentTime().seconds) notification:\(notification)") guard (notification.object as? 
AVPlayerItem) == currentItem else { return } } From a474c5335d91df0d3e858d022a7639f64c15b24b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 6 Mar 2020 11:52:55 +0800 Subject: [PATCH 221/332] chore: fix sample projects compile error --- framework/GPUImage.xcodeproj/project.pbxproj | 4 ++++ framework/Source/iOS/CILookupFilter.swift | 2 ++ 2 files changed, 6 insertions(+) diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 75244ce1..db6f6501 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -18,6 +18,7 @@ 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */; }; + 262E656D240F5F27002C27AB /* CILookupFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 262E656B240F5EE0002C27AB /* CILookupFilter.swift */; }; 264B6AD9237303370090979C /* MoviePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 264B6AD6237303040090979C /* MoviePlayer.swift */; }; 264B6ADA2373033B0090979C /* FramebufferGenerator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 264B6AD5237303040090979C /* FramebufferGenerator.swift */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; @@ -393,6 +394,7 @@ 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TPCircularBuffer.h; path = Source/TPCircularBuffer.h; sourceTree = ""; }; 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TPCircularBuffer.m; path = Source/TPCircularBuffer.m; sourceTree = ""; }; 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/iOS/SpeakerOutput.swift; sourceTree = ""; }; + 262E656B240F5EE0002C27AB /* CILookupFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CILookupFilter.swift; path = Source/iOS/CILookupFilter.swift; sourceTree = ""; }; 264B6AD5237303040090979C /* FramebufferGenerator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = FramebufferGenerator.swift; path = Source/iOS/FramebufferGenerator.swift; sourceTree = ""; }; 264B6AD6237303040090979C /* MoviePlayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MoviePlayer.swift; path = Source/iOS/MoviePlayer.swift; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; @@ -1161,6 +1163,7 @@ BC9E35201E524D2A00B8604F /* iOS */ = { isa = PBXGroup; children = ( + 262E656B240F5EE0002C27AB /* CILookupFilter.swift */, 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */, BC9E35231E524D4D00B8604F /* RenderView.swift */, BC9E35221E524D4D00B8604F /* PictureOutput.swift */, @@ -1693,6 +1696,7 @@ BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, BC9E35831E52571100B8604F /* LocalBinaryPattern.swift in Sources */, BC9E35C71E5257E700B8604F /* ExclusionBlend.swift in Sources */, + 262E656D240F5F27002C27AB /* CILookupFilter.swift in Sources */, BC9E35A31E52577300B8604F /* KuwaharaFilter.swift in Sources */, BC9E35481E524DA700B8604F /* CrosshairGenerator.swift 
in Sources */, BC9E35B81E5257B500B8604F /* SmoothToonFilter.swift in Sources */, diff --git a/framework/Source/iOS/CILookupFilter.swift b/framework/Source/iOS/CILookupFilter.swift index 32321e2b..3924b472 100644 --- a/framework/Source/iOS/CILookupFilter.swift +++ b/framework/Source/iOS/CILookupFilter.swift @@ -7,6 +7,8 @@ // import Foundation +import UIKit +import CoreImage public class CILookupFilter { private var lutFilter: CIFilter? From e21b6932a5cbd81736ad28bb3721dbe6f3d19cd9 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 6 Mar 2020 11:56:57 +0800 Subject: [PATCH 222/332] improve(MovieOutput): make optimizeForNetworkUse optional and add more log --- framework/Source/Pipeline.swift | 2 +- framework/Source/iOS/MovieOutput.swift | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index f9efa2e2..ffd2d27d 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -111,7 +111,7 @@ class WeakImageConsumer { public class TargetContainer:Sequence { private var targets = [WeakImageConsumer]() - var count:Int { get { return targets.count } } + public var count:Int { get { return targets.count } } #if !os(Linux) let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index c4d3c673..ca883c9e 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -1,5 +1,6 @@ import AVFoundation import CoreImage +import UIKit public protocol AudioEncodingTarget { func activateAudioTrack() throws @@ -105,7 +106,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } var preferredTransform: CGAffineTransform? - public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? 
= nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { print("movie output init \(URL)") self.url = URL @@ -122,7 +123,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.size = size assetWriter = try AVAssetWriter(url:URL, fileType:fileType) - assetWriter.shouldOptimizeForNetworkUse = true + if optimizeForNetworkUse { + // NOTE: this is neccessary for streaming play support, but it will slow down finish writing speed + assetWriter.shouldOptimizeForNetworkUse = true + } var localSettings:[String:Any] if let videoSettings = videoSettings { @@ -277,6 +281,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func finishRecording(_ completionCallback:(() -> Void)? = nil) { + print("MovieOutput start finishing writing, optimizeForNetworkUse:\(assetWriter.shouldOptimizeForNetworkUse)") _writerAsync { [weak self] in self?._cleanBufferCaches(shouldAppend: true) guard let self = self, self.state == .writing, @@ -301,10 +306,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if let lastFrame = self.previousFrameTime, let startFrame = self.startFrameTime { self.recordedDuration = lastFrame - startFrame } + print("MovieOutput did start finishing writing. Total frames appended:\(self.totalFramesAppended)") self.assetWriter.finishWriting { + print("MovieOutput did finish writing") completionCallback?() } - print("MovieOutput finished writing. 
Total frames appended:\(self.totalFramesAppended)") } } From 16509e821a87202145885dfbcde3344f87fa206c Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 12 Mar 2020 10:51:58 +0800 Subject: [PATCH 223/332] fix(player): fix player keep replaying. --- framework/Source/iOS/MoviePlayer.swift | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 46b6aba7..cb62f4ea 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -552,11 +552,7 @@ private extension MoviePlayer { } @objc func displayLinkCallback(displayLink: CADisplayLink) { - if !retryPlaying && isPlaying && items().isEmpty { - print("[MoviePlayer] Items is empty when playing. Retry playing") - retryPlaying = true - replayLastItem() - } else if !items().isEmpty { + if !items().isEmpty { if retryPlaying { retryPlaying = false print("[MoviePlayer] Resume playing succeed") From a9ed2437ccd93201036cce689b0daf923fec237d Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 12 Mar 2020 22:58:09 +0800 Subject: [PATCH 224/332] fix(player): fix player looping broken. --- framework/Source/iOS/MoviePlayer.swift | 31 ++++++++++++++++++++------ 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index cb62f4ea..2e3a4a49 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -97,6 +97,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return false } private var didTriggerEndTimeObserver = false + private var didRegisterPlayerNotification = false + private var didNotifyEndedItem: AVPlayerItem? = nil private var retryPlaying = false /// Return the current item. If currentItem was played to end, will return next one public var actualCurrentItem: AVPlayerItem? 
{ @@ -150,7 +152,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { lastPlayerItem = item self.enableVideoOutput = enableVideoOutput _setupPlayerObservers(playerItem: item) - if shouldDelayAddPlayerItem { + if shouldDelayAddPlayerItem && didNotifyEndedItem != item { needAddItemAfterDidEndNotify = true pendingNewItems.append(item) print("[MoviePlayer] pending insert. pendingNewItems:\(pendingNewItems)") @@ -164,14 +166,21 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { remove(item) super.insert(item, after: afterItem) } + didNotifyEndedItem = nil print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") } + public func seekItem(_ item: AVPlayerItem, to time: CMTime, toleranceBefore: CMTime = .zero, toleranceAfter: CMTime = .zero, completionHandler: ((Bool) -> Void)? = nil) { + item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter, completionHandler: completionHandler) + didNotifyEndedItem = nil + } + override public func replaceCurrentItem(with item: AVPlayerItem?) { replaceCurrentItem(with: item, enableVideoOutput: enableVideoOutput) } public func replaceCurrentItem(with item: AVPlayerItem?, enableVideoOutput: Bool) { + didNotifyEndedItem = nil lastPlayerItem = item // Stop looping before replacing if shouldUseLooper && MoviePlayer.looperDict[self] != nil { @@ -257,6 +266,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { print("[MoviePlayer] start currentTime:\(currentTime().seconds) duration:\(String(describing: asset?.duration.seconds)) items:\(items())") _setupDisplayLinkIfNeeded() _resetTimeObservers() + didNotifyEndedItem = nil if shouldUseLooper { if let playerItem = lastPlayerItem { MoviePlayer.looperDict[self]?.disableLooping() @@ -434,9 +444,12 @@ private extension MoviePlayer { } func _setupPlayerObservers(playerItem: AVPlayerItem?) 
{ - _removePlayerObservers() - NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) - NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + _removePlayerObservers(removeNotificationCenter: !didRegisterPlayerNotification) + if !didRegisterPlayerNotification { + NotificationCenter.default.addObserver(self, selector: #selector(playerDidPlayToEnd), name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(playerStalled), name: .AVPlayerItemPlaybackStalled, object: nil) + didRegisterPlayerNotification = true + } observations.append(observe(\.status) { [weak self] _, _ in self?.playerStatusDidChange() }) @@ -450,9 +463,12 @@ private extension MoviePlayer { } } - func _removePlayerObservers() { - NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil) - NotificationCenter.default.removeObserver(self, name: .AVPlayerItemPlaybackStalled, object: nil) + func _removePlayerObservers(removeNotificationCenter: Bool = true) { + if removeNotificationCenter { + NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil) + NotificationCenter.default.removeObserver(self, name: .AVPlayerItemPlaybackStalled, object: nil) + didRegisterPlayerNotification = false + } observations.forEach { $0.invalidate() } observations.removeAll() } @@ -591,6 +607,7 @@ private extension MoviePlayer { @objc func playerDidPlayToEnd(notification: Notification) { print("[MoviePlayer] did play to end. currentTime:\(currentTime().seconds) notification:\(notification) items:\(items())") guard (notification.object as? 
AVPlayerItem) == currentItem else { return } + didNotifyEndedItem = currentItem if needAddItemAfterDidEndNotify { DispatchQueue.main.async() { [weak self] in guard let self = self else { return } From e4e67925210191f9f777b9d356a3b4ffc55303b1 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 18 Mar 2020 16:08:31 +0800 Subject: [PATCH 225/332] improve(MovieOutput): change MovieOutput render context to shared --- framework/Source/iOS/MovieInput.swift | 11 ++++--- framework/Source/iOS/MovieOutput.swift | 43 ++++++++++++++------------ 2 files changed, 30 insertions(+), 24 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 43f87dfc..4a84b308 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -354,7 +354,7 @@ public class MovieInput: ImageSource { func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { - movieOutput.movieProcessingContext.runOperationAsynchronously { + MovieOutput.movieProcessingContext.runOperationAsynchronously { // Documentation: "Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done // appending buffers to it. This is necessary to prevent other inputs from stalling, as they may otherwise wait forever // for that input's media data, attempting to complete the ideal interleaving pattern." 
@@ -448,7 +448,7 @@ public class MovieInput: ImageSource { let shouldInvalidate = !transcodingOnly guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { - movieOutput.movieProcessingContext.runOperationAsynchronously { + MovieOutput.movieProcessingContext.runOperationAsynchronously { movieOutput.flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: shouldInvalidate) movieOutput.audioEncodingIsFinished = true movieOutput.assetWriterAudioInput?.markAsFinished() @@ -460,7 +460,7 @@ public class MovieInput: ImageSource { self.synchronizedEncodingDebugPrint("Process audio sample input. Time:\(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)))") if let movieOutput = self.synchronizedMovieOutput { - movieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in guard let self = self else { return } self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidate) } @@ -479,7 +479,10 @@ public class MovieInput: ImageSource { func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let startTime = CACurrentMediaTime() - guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) else { return } + guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) else { + print("Cannot generate framebuffer from YUVBuffer") + return + } framebuffer.userInfo = framebufferUserInfo self.movieFramebuffer = framebuffer self.updateTargetsWithFramebuffer(framebuffer) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index ca883c9e..18c471e9 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -86,7 
+86,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var audioSettings:[String:Any]? = nil public private(set) var audioSourceFormatHint:CMFormatDescription? - public let movieProcessingContext:OpenGLContext + public static let movieProcessingContext: OpenGLContext = { + var context: OpenGLContext? + sharedImageProcessingContext.runOperationSynchronously { + context = OpenGLContext() + } + return context! + }() public private(set) var videoPixelBufferCache = [(CVPixelBuffer, CMTime)]() public private(set) var videoSampleBufferCache = NSMutableArray() public private(set) var audioSampleBufferCache = [CMSampleBuffer]() @@ -112,12 +118,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.url = URL imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup - let movieProcessingContext = OpenGLContext() - if movieProcessingContext.supportsTextureCaches() { - self.colorSwizzlingShader = movieProcessingContext.passthroughShader + if Self.movieProcessingContext.supportsTextureCaches() { + self.colorSwizzlingShader = Self.movieProcessingContext.passthroughShader } else { - self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try movieProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} + self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try Self.movieProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} } self.size = size @@ -166,8 +171,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.audioSettings = audioSettings self.audioSourceFormatHint = audioSourceFormatHint - - self.movieProcessingContext = movieProcessingContext } public func setupSoftwareLUTFilter(lutImage: UIImage, intensity: Double? = nil, brightnessFactor: Double? 
= nil, sync: Bool = true) { @@ -342,7 +345,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private func _cleanBufferCaches(shouldAppend: Bool) { print("[Caching] Drain all buffers videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") - movieProcessingContext.runOperationSynchronously { + Self.movieProcessingContext.runOperationSynchronously { if shouldAppend { self._appendPixelBuffersFromCache() self._appendAudioBuffersFromCache() @@ -376,13 +379,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. - movieProcessingContext.runOperationAsynchronously(work) + Self.movieProcessingContext.runOperationAsynchronously(work) } else { // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. - movieProcessingContext.runOperationSynchronously(work) + Self.movieProcessingContext.runOperationSynchronously(work) } } @@ -530,17 +533,17 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let bufferSize = GLSize(self.size) var cachedTextureRef:CVOpenGLESTexture? 
= nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, Self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) - renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer = try Framebuffer(context:Self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:movieProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: movieProcessingContext) + renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:Self.movieProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: Self.movieProcessingContext) - if movieProcessingContext.supportsTextureCaches() { + if Self.movieProcessingContext.supportsTextureCaches() { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) @@ -611,7 +614,7 @@ public class 
MovieOutput: ImageConsumer, AudioEncodingTarget { } if encodingLiveVideo { - movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) + Self.movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) } else { (state == .caching ? cache : work)() } @@ -734,7 +737,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if encodingLiveVideo { - movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) + Self.movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) } else { (state == .caching ? cache : work)() } @@ -785,14 +788,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } private func _writerAsync(operation: @escaping () -> Void) { - MovieOutput.assetWriterQueue.async { [weak self] in - self?.movieProcessingContext.runOperationSynchronously(operation) + MovieOutput.assetWriterQueue.async { + Self.movieProcessingContext.runOperationSynchronously(operation) } } private func _writerSync(operation: @escaping () -> Void) { - MovieOutput.assetWriterQueue.sync { [weak self] in - self?.movieProcessingContext.runOperationSynchronously(operation) + MovieOutput.assetWriterQueue.sync { + Self.movieProcessingContext.runOperationSynchronously(operation) } } } From b1fae9bfde849f85226b6ec569422b501c3b7dfb Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 19 Mar 2020 19:53:38 +0800 Subject: [PATCH 226/332] fix(preview): fix preview playing wrong item. 
--- framework/Source/iOS/MoviePlayer.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 2e3a4a49..5a6cbfc4 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -152,7 +152,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { lastPlayerItem = item self.enableVideoOutput = enableVideoOutput _setupPlayerObservers(playerItem: item) - if shouldDelayAddPlayerItem && didNotifyEndedItem != item { + if shouldDelayAddPlayerItem && didNotifyEndedItem != nil && didNotifyEndedItem != item { needAddItemAfterDidEndNotify = true pendingNewItems.append(item) print("[MoviePlayer] pending insert. pendingNewItems:\(pendingNewItems)") @@ -165,9 +165,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } remove(item) super.insert(item, after: afterItem) + print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") } didNotifyEndedItem = nil - print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") } public func seekItem(_ item: AVPlayerItem, to time: CMTime, toleranceBefore: CMTime = .zero, toleranceAfter: CMTime = .zero, completionHandler: ((Bool) -> Void)? 
= nil) { From 4550719f15996074478fbc0bd8de2f53559507a0 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 26 Mar 2020 00:19:17 +0800 Subject: [PATCH 227/332] improve(MovieOutput): better match lock and unlock for PixelBuffer base address --- framework/Source/iOS/MovieOutput.swift | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 18c471e9..38246760 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -418,8 +418,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } catch { print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") } - - CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) } private func _processPixelBufferCache(framebuffer: Framebuffer) { @@ -548,6 +546,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) } + CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) } // MARK: Append buffer directly from CMSampleBuffer From a2a6502b2cfeb27818039573af11a0e399d2efe7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 26 Mar 2020 10:57:41 +0800 Subject: [PATCH 228/332] improve(MovieOutput): change audio encode to async when encoding live video --- framework/Source/iOS/MovieOutput.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 38246760..de9d7713 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -736,7 +736,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if encodingLiveVideo { - 
Self.movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) + Self.movieProcessingContext.runOperationAsynchronously(state == .caching ? cache : work) } else { (state == .caching ? cache : work)() } From f54a0212a1b92f965ee8c00d5dcdf62898fa05a1 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 26 Mar 2020 10:58:24 +0800 Subject: [PATCH 229/332] improve(MoviePlayer): clean framebuffer cache after player dismiss --- framework/Source/iOS/MoviePlayer.swift | 3 +++ 1 file changed, 3 insertions(+) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 5a6cbfc4..65beb7a7 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -127,6 +127,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { assert(observations.isEmpty, "observers must be removed before deinit") pause() displayLink?.invalidate() + if hasTarget { + sharedImageProcessingContext.framebufferCache.purgeAllUnassignedFramebuffers() + } } // MARK: Data Source From 1d791fc4857c2b3e3bad0028d20dd0e43b515ff1 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 26 Mar 2020 12:12:17 +0800 Subject: [PATCH 230/332] fix(MoviePlayer): add more log and fix inserting is not working after last playerItem did play to end --- framework/Source/iOS/MoviePlayer.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 65beb7a7..90ab982c 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -155,7 +155,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { lastPlayerItem = item self.enableVideoOutput = enableVideoOutput _setupPlayerObservers(playerItem: item) - if shouldDelayAddPlayerItem && didNotifyEndedItem != nil && didNotifyEndedItem != item { + if shouldDelayAddPlayerItem && didNotifyEndedItem != nil && didNotifyEndedItem != item && didNotifyEndedItem != 
items().last { needAddItemAfterDidEndNotify = true pendingNewItems.append(item) print("[MoviePlayer] pending insert. pendingNewItems:\(pendingNewItems)") @@ -174,6 +174,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func seekItem(_ item: AVPlayerItem, to time: CMTime, toleranceBefore: CMTime = .zero, toleranceAfter: CMTime = .zero, completionHandler: ((Bool) -> Void)? = nil) { + print("[MoviePlayer] seek item:\(item) to time:\(time.seconds) toleranceBefore:\(toleranceBefore.seconds) toleranceAfter:\(toleranceAfter.seconds)") item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter, completionHandler: completionHandler) didNotifyEndedItem = nil } From 6d66fe4d1e9e489a3ff321dd08a4ff258c14b98c Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 8 Apr 2020 12:45:33 +0800 Subject: [PATCH 231/332] fix(MoviePlayer): fix ANR when accessing currentTime when player notification is triggered --- framework/Source/iOS/MoviePlayer.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 90ab982c..666304ff 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -514,12 +514,12 @@ private extension MoviePlayer { } func playerStatusDidChange() { - debugPrint("[MoviePlayer] Player status change to:\(status.rawValue) asset:\(String(describing: asset)) currentTime:\(currentTime().seconds)") + debugPrint("[MoviePlayer] Player status change to:\(status.rawValue) asset:\(String(describing: asset))") resumeIfNeeded() } func playerItemStatusDidChange(_ playerItem: AVPlayerItem) { - debugPrint("[MoviePlayer] PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset) currentTime:\(currentTime().seconds)") + debugPrint("[MoviePlayer] PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset)") if playerItem == currentItem { resumeIfNeeded() } From 
e232c87722cd6f5aa043c17bf8f3954209033542 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 20 Apr 2020 18:59:46 +0800 Subject: [PATCH 232/332] fix(MovieOutput): fix strange brightness blinking at the beginning of video --- framework/Source/iOS/MovieOutput.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index de9d7713..4bad5bb9 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -97,6 +97,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var videoSampleBufferCache = NSMutableArray() public private(set) var audioSampleBufferCache = [CMSampleBuffer]() public private(set) var cacheBuffersDuration: TimeInterval = 0 + public let disablePixelBufferAttachments: Bool var shouldInvalidateAudioSampleWhenDone = false var synchronizedEncodingDebug = false @@ -112,7 +113,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } var preferredTransform: CGAffineTransform? - public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? 
= nil) throws { print("movie output init \(URL)") self.url = URL @@ -169,6 +170,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) + self.disablePixelBufferAttachments = disablePixelBufferAttachments + self.audioSettings = audioSettings self.audioSourceFormatHint = audioSourceFormatHint } @@ -523,7 +526,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { // Is this the first pixel buffer we have recieved? - if(renderFramebuffer == nil) { + // NOTE: this will cause strange frame brightness blinking for the first few seconds, be careful about using this. + if renderFramebuffer == nil && !disablePixelBufferAttachments { CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) From 6f9acf95e2894054492c2df7b8ed2117eb5705cc Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 24 Apr 2020 16:06:33 +0800 Subject: [PATCH 233/332] fix(MovieOutput): fix can't stop video recording correctly and improve logging and reduce OOM --- framework/Source/iOS/MovieOutput.swift | 34 ++++++++++++++++---------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 4bad5bb9..6d276366 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -101,7 +101,8 @@ public class MovieOutput: ImageConsumer, 
AudioEncodingTarget { var shouldInvalidateAudioSampleWhenDone = false var synchronizedEncodingDebug = false - public private(set) var totalFramesAppended:Int = 0 + public private(set) var totalVideoFramesAppended = 0 + public private(set) var totalAudioFramesAppended = 0 private var observations = [NSKeyValueObservation]() deinit { @@ -112,6 +113,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return !encodingLiveVideo || waitUtilDataIsReadyForLiveVideo } var preferredTransform: CGAffineTransform? + private var isProcessing = false public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { @@ -301,6 +303,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.state = .finished + self.assetWriterAudioInput?.markAsFinished() + self.assetWriterVideoInput.markAsFinished() + if let lastFrame = self.previousFrameTime { // Resolve black frames at the end. Without this the end timestamp of the session's samples could be either video or audio. // Documentation: "You do not need to call this method; if you call finishWriting without @@ -312,7 +317,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if let lastFrame = self.previousFrameTime, let startFrame = self.startFrameTime { self.recordedDuration = lastFrame - startFrame } - print("MovieOutput did start finishing writing. Total frames appended:\(self.totalFramesAppended)") + print("MovieOutput did start finishing writing. 
Total frames appended video::\(self.totalVideoFramesAppended) audio:\(self.totalAudioFramesAppended)") self.assetWriter.finishWriting { print("MovieOutput did finish writing") completionCallback?() @@ -361,7 +366,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - glFinish(); + glFinish() if previousFrameTime == nil && videoSampleBufferCache.count <= 0 && videoPixelBufferCache.isEmpty && (state == .caching || state == .writing) { debugPrint("starting process new framebuffer when previousFrameTime == nil") @@ -373,6 +378,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } else { self?._processPixelBufferCache(framebuffer: framebuffer) } + self?.isProcessing = false sharedImageProcessingContext.runOperationAsynchronously { framebuffer.unlock() } @@ -382,6 +388,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. + guard !isProcessing else { + framebuffer.unlock() + return + } + isProcessing = true Self.movieProcessingContext.runOperationAsynchronously(work) } else { @@ -465,12 +476,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { - synchronizedEncodingDebugPrint("Video waiting...") + print("Video waiting...") // Better to poll isReadyForMoreMediaData often since when it does become true // we don't want to risk letting framebuffers pile up in between poll intervals. 
usleep(100000) // 0.1 seconds if markIsFinishedAfterProcessing { - synchronizedEncodingDebugPrint("set videoEncodingIsFinished to true after processing") + print("set videoEncodingIsFinished to true after processing") markIsFinishedAfterProcessing = false videoEncodingIsFinished = true } @@ -485,10 +496,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { _renderAndCache(framebuffer: framebuffer) _appendPixelBuffersFromCache() } - - if videoEncodingIsFinished { - assetWriterVideoInput.markAsFinished() - } } private func _appendPixelBuffersFromCache() { @@ -515,7 +522,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { break } appendedBufferCount += 1 - self.totalFramesAppended += 1 + self.totalVideoFramesAppended += 1 } } } catch { @@ -608,7 +615,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } while(!self.assetWriterVideoInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.videoEncodingIsFinished) { - self.synchronizedEncodingDebugPrint("Video waiting...") + print("Video waiting...") // Better to poll isReadyForMoreMediaData often since when it does become true // we don't want to risk letting framebuffers pile up in between poll intervals. 
usleep(100000) // 0.1 seconds @@ -657,7 +664,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { break } appendedBufferCount += 1 - self.totalFramesAppended += 1 + self.totalVideoFramesAppended += 1 } } catch { print("WARNING: Trouble appending video sample buffer at time: \(time) \(error)") @@ -725,7 +732,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { print("Audio waiting...") usleep(100000) if !assetWriterAudioInput.isReadyForMoreMediaData { - self.synchronizedEncodingDebugPrint("Audio still not ready, skip this runloop...") + print("Audio still not ready, skip this runloop...") return } } @@ -765,6 +772,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { break } appendedBufferCount += 1 + totalAudioFramesAppended += 1 if shouldInvalidateAudioSampleWhenDone { CMSampleBufferInvalidate(audioBuffer) } From 689174fd2842cb8deaaa15102169dfa48bcdec30 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 27 Apr 2020 16:52:52 +0800 Subject: [PATCH 234/332] improve: add thread safety assert for target operations --- framework/Source/Pipeline.swift | 3 +++ 1 file changed, 3 insertions(+) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index ffd2d27d..8b696fd9 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -32,6 +32,7 @@ infix operator --> : AdditionPrecedence public extension ImageSource { func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? 
= nil) { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) if let targetIndex = atTargetIndex { target.setSource(self, atIndex:targetIndex) targets.append(target, indexAtTarget:targetIndex) @@ -49,6 +50,7 @@ public extension ImageSource { } func removeAllTargets() { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) for (target, index) in targets { target.removeSourceAtIndex(index) } @@ -56,6 +58,7 @@ public extension ImageSource { } func remove(_ target:ImageConsumer) { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) for (testTarget, index) in targets { if(target === testTarget) { target.removeSourceAtIndex(index) From 24eac1913ca05bc0855fe0bccc275d37d9e61129 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 27 Apr 2020 19:00:37 +0800 Subject: [PATCH 235/332] improve: add assertion for checking thread safety when accessing sources and targets --- framework/Source/Pipeline.swift | 42 +++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 8b696fd9..447fef2e 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -2,12 +2,14 @@ // MARK: Basic types import Foundation -public protocol ImageSource { +public protocol ImageSource: AnyObject { + var _needCheckSourceThread: Bool { get } var targets:TargetContainer { get } func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) } -public protocol ImageConsumer:AnyObject { +public protocol ImageConsumer: AnyObject { + var _needCheckConsumerThread: Bool { get } var maximumInputs:UInt { get } var sources:SourceContainer { get } @@ -31,8 +33,14 @@ infix operator --> : AdditionPrecedence // MARK: Extensions and supporting types public extension ImageSource { + var _needCheckSourceThread: Bool { + return true + } + func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? 
= nil) { - __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + if _needCheckSourceThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } if let targetIndex = atTargetIndex { target.setSource(self, atIndex:targetIndex) targets.append(target, indexAtTarget:targetIndex) @@ -50,7 +58,9 @@ public extension ImageSource { } func removeAllTargets() { - __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + if _needCheckSourceThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } for (target, index) in targets { target.removeSourceAtIndex(index) } @@ -58,7 +68,9 @@ public extension ImageSource { } func remove(_ target:ImageConsumer) { - __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + if _needCheckSourceThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } for (testTarget, index) in targets { if(target === testTarget) { target.removeSourceAtIndex(index) @@ -89,17 +101,37 @@ public extension ImageSource { } public extension ImageConsumer { + var _needCheckConsumerThread: Bool { + return true + } + func addSource(_ source:ImageSource) -> UInt? 
{ + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } return sources.append(source, maximumInputs:maximumInputs) } func setSource(_ source:ImageSource, atIndex:UInt) { + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } _ = sources.insert(source, atIndex:atIndex, maximumInputs:maximumInputs) } func removeSourceAtIndex(_ index:UInt) { + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } sources.removeAtIndex(index) } + + func removeAllSources() { + if _needCheckConsumerThread { + __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) + } + sources.sources.removeAll() + } } class WeakImageConsumer { From 41dda5d856a7bce1e81629a02f24d5f32110557b Mon Sep 17 00:00:00 2001 From: RoCry Date: Tue, 28 Apr 2020 18:44:12 +0800 Subject: [PATCH 236/332] fix(output): skip when create texture failed --- framework/Source/iOS/MovieOutput.swift | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 6d276366..4f449040 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -542,7 +542,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let bufferSize = GLSize(self.size) var cachedTextureRef:CVOpenGLESTexture? 
= nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, Self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, Self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + if ret != kCVReturnSuccess { + print("ret error: \(ret), pixelBuffer: \(pixelBuffer)") + return + } let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) renderFramebuffer = try Framebuffer(context:Self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) From a579f0e2c911492614edc57577f97a4f376c8d31 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Wed, 6 May 2020 12:18:46 +0800 Subject: [PATCH 237/332] fix(audio): correctly throws when fail to active audio target. --- framework/Source/iOS/MovieInput.swift | 29 +++++++++++++------------- framework/Source/iOS/MovieOutput.swift | 7 +++++-- 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 4a84b308..d20df2d9 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -15,21 +15,7 @@ public class MovieInput: ImageSource { public weak var delegate: MovieInputDelegate? - public var audioEncodingTarget:AudioEncodingTarget? 
{ - didSet { - guard let audioEncodingTarget = audioEncodingTarget else { - return - } - do { - try audioEncodingTarget.activateAudioTrack() - } catch { - print("ERROR: Could not connect audio target with error: \(error)") - } - - // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. - if(synchronizedMovieOutput != nil) { self.enableSynchronizedEncoding() } - } - } + public private(set) var audioEncodingTarget:AudioEncodingTarget? let yuvConversionShader:ShaderProgram public let asset:AVAsset @@ -511,6 +497,19 @@ public class MovieInput: ImageSource { } } + public func setAudioEncodingTarget(_ target: AudioEncodingTarget?) throws { + audioEncodingTarget = target + + guard let audioEncodingTarget = audioEncodingTarget else { + return + } + + try audioEncodingTarget.activateAudioTrack() + + // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. + if(synchronizedMovieOutput != nil) { self.enableSynchronizedEncoding() } + } + // MARK: - // MARK: Synchronized encoding diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 4f449040..b509de4a 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -684,8 +684,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { throw MovieOutputError.activeAudioTrackError } assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) - - assetWriter.add(assetWriterAudioInput!) + let assetWriter = self.assetWriter + let audioInpupt = self.assetWriterAudioInput! 
+ try NSObject.catchException { + assetWriter.add(audioInpupt) + } assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } From f475688aae91837034b6983b08b9fcbe57d8f831 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 3 Apr 2020 16:53:38 +0800 Subject: [PATCH 238/332] improve: specify a different dispatch queue label for MovieOutput --- framework/Source/iOS/MovieOutput.swift | 4 +++- framework/Source/iOS/OpenGLContext.swift | 5 +++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index b509de4a..60ee63e3 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -88,9 +88,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public static let movieProcessingContext: OpenGLContext = { var context: OpenGLContext? + imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup sharedImageProcessingContext.runOperationSynchronously { - context = OpenGLContext() + context = OpenGLContext(queueLabel: "com.GPUImage2.MovieOutput.imageProcess") } + imageProcessingShareGroup = nil return context! }() public private(set) var videoPixelBufferCache = [(CVPixelBuffer, CMTime)]() diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index b99d08e5..44d431a9 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -27,14 +27,15 @@ public class OpenGLContext: SerialDispatch { }() - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", qos: .userInitiated) + public let serialDispatchQueue:DispatchQueue public let dispatchQueueKey = DispatchSpecificKey() public let dispatchQueueKeyValue: Int // MARK: - // MARK: Initialization and teardown - init() { + init(queueLabel: String? = nil) { + serialDispatchQueue = DispatchQueue(label: (queueLabel ?? 
"com.sunsetlakesoftware.GPUImage.processingQueue"), qos: .userInitiated) dispatchQueueKeyValue = dispatchQueKeyValueCounter serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:dispatchQueueKeyValue) dispatchQueKeyValueCounter += 1 From 8bb026011dd95869f15cfc4795527b5673bd23dc Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 3 Apr 2020 17:13:27 +0800 Subject: [PATCH 239/332] improve: introduce executeStartTime and alreadyExecuteTime for SerialDispatch --- framework/Source/SerialDispatch.swift | 15 ++++++++++++++- framework/Source/iOS/OpenGLContext.swift | 1 + 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index a7276489..555a5265 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -61,7 +61,8 @@ func runOnMainQueue(_ mainThreadOperation:() -> T) -> T { // MARK: - // MARK: SerialDispatch extension -public protocol SerialDispatch { +public protocol SerialDispatch: class { + var executeStartTime:TimeInterval? 
{ get set } var serialDispatchQueue:DispatchQueue { get } var dispatchQueueKey:DispatchSpecificKey { get } var dispatchQueueKeyValue:Int { get } @@ -69,10 +70,20 @@ public protocol SerialDispatch { } public extension SerialDispatch { + var alreadyExecuteTime: TimeInterval { + if let executeStartTime = executeStartTime { + return CACurrentMediaTime() - executeStartTime + } else { + return 0.0 + } + } + func runOperationAsynchronously(_ operation:@escaping () -> ()) { self.serialDispatchQueue.async { + self.executeStartTime = CACurrentMediaTime() self.makeCurrentContext() operation() + self.executeStartTime = nil } } @@ -82,8 +93,10 @@ public extension SerialDispatch { operation() } else { self.serialDispatchQueue.sync { + self.executeStartTime = CACurrentMediaTime() self.makeCurrentContext() operation() + self.executeStartTime = nil } } } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index 44d431a9..86b150ad 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -30,6 +30,7 @@ public class OpenGLContext: SerialDispatch { public let serialDispatchQueue:DispatchQueue public let dispatchQueueKey = DispatchSpecificKey() public let dispatchQueueKeyValue: Int + public var executeStartTime: TimeInterval? 
// MARK: - // MARK: Initialization and teardown From 197facb5890e07077b6aa2c5b8ffbf80c563e25f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 3 Apr 2020 17:15:07 +0800 Subject: [PATCH 240/332] improve: introduce MovieCache and move cache management code from MovieOutput to it --- framework/Source/iOS/MovieCache.swift | 273 +++++++++++ framework/Source/iOS/MovieInput.swift | 2 +- framework/Source/iOS/MovieOutput.swift | 618 +++++++++---------------- 3 files changed, 501 insertions(+), 392 deletions(-) create mode 100644 framework/Source/iOS/MovieCache.swift diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift new file mode 100644 index 00000000..82b4694a --- /dev/null +++ b/framework/Source/iOS/MovieCache.swift @@ -0,0 +1,273 @@ +// +// MovieCache.swift +// GPUImage2 +// +// Created by 陈品霖 on 2020/3/27. +// + +import Foundation +import AVFoundation + +public enum MovieCacheError: Error { + case invalidState + case emptyMovieOutput +} + +public class MovieCache: ImageConsumer, AudioEncodingTarget { + public let sources = SourceContainer() + public let maximumInputs: UInt = 1 + public private(set) var movieOutput: MovieOutput? 
+ public private(set) lazy var framebufferCache = [Framebuffer]() + public private(set) lazy var videoSampleBufferCache = NSMutableArray() + public private(set) lazy var audioSampleBufferCache = [CMSampleBuffer]() + public private(set) var cacheBuffersDuration: TimeInterval = 0 + public enum State: String { + case unknown + case idle + case caching + case writing + case stopped + case canceled + } + public private(set) var state = State.unknown + + public init() { + print("MovieCache init") + } + + public func startCaching(duration: TimeInterval) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._startCaching(duration: duration) + } + } + + public func setMovieOutput(_ movieOutput: MovieOutput) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._setMovieOutput(movieOutput) + } + } + + public func startWriting(_ completionCallback:((_ error: Error?) -> Void)? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._startWriting(completionCallback) + } + } + + public func stopWriting(_ completionCallback:((Error?) -> Void)? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._stopWriting(completionCallback) + } + } + + public func cancelWriting(_ completionCallback:(() -> Void)? = nil) { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._cancelWriting(completionCallback) + } + } + + public func stopCaching() { + MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in + self?._stopCaching() + } + } +} + +extension MovieCache { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { +// debugPrint("get new framebuffer time:\(framebuffer.timingStyle.timestamp?.asCMTime.seconds ?? 
.zero)") + guard shouldProcessBuffer else { return } + glFinish() + _cacheFramebuffer(framebuffer) + _writeFramebuffers() + } + + public func activateAudioTrack() throws { + try movieOutput?.activateAudioTrack() + } + + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { + guard shouldProcessBuffer else { return } + _cacheAudioSampleBuffer(sampleBuffer) + _writeAudioSampleBuffers(shouldInvalidateSampleWhenDone) + } + + public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + guard shouldProcessBuffer else { return } + _cacheVideoSampleBuffer(sampleBuffer) + _writeVideoSampleBuffers(shouldInvalidateSampleWhenDone) + } + + public func readyForNextAudioBuffer() -> Bool { + guard shouldProcessBuffer else { return false } + return true + } +} + +private extension MovieCache { + var shouldProcessBuffer: Bool { + return state != .unknown && state != .idle + } + + func _tryTransitingState(to newState: State) -> Bool { + guard state != newState else { return true } + switch (state, newState) { + case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), + (.idle, .caching), (.idle, .writing), (.idle, .canceled), + (.caching, .writing), (.caching, .stopped), (.caching, .canceled), (.caching, .idle), + (.writing, .stopped), (.writing, .canceled), + (.stopped, .idle), (.stopped, .writing), + (.canceled, .idle), (.canceled, .writing): + debugPrint("state transite from:\(state) to:\(newState)") + state = newState + return true + default: + assertionFailure() + print("ERROR: invalid state transition from:\(state) to:\(newState)") + return false + } + } + + func _startCaching(duration: TimeInterval) { + guard _tryTransitingState(to: .caching) else { return } + print("start caching") + cacheBuffersDuration = duration + } + + func _setMovieOutput(_ movieOutput: MovieOutput) { + guard state != .writing || self.movieOutput == nil else { + assertionFailure("Should not set 
MovieOutput during writing") + return + } + print("set movie output") + self.movieOutput = movieOutput + } + + func _startWriting(_ completionCallback:((_ error: Error?) -> Void)? = nil) { + guard _tryTransitingState(to: .writing) else { + completionCallback?(MovieCacheError.invalidState) + return + } + guard movieOutput != nil else { + print("movie output is not ready yet, waiting...") + completionCallback?(nil) + return + } + print("start writing") + movieOutput?.startRecording(sync: true) { _, error in + completionCallback?(error) + } + } + + func _stopWriting(_ completionCallback:((Error?) -> Void)? = nil) { + guard _tryTransitingState(to: .stopped), movieOutput != nil else { + completionCallback?(MovieCacheError.invalidState) + return + } + print("stop writing. videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + movieOutput?.finishRecording(sync: true) { + completionCallback?(nil) + } + movieOutput = nil + } + + func _cancelWriting(_ completionCallback:(() -> Void)? 
= nil) { + guard _tryTransitingState(to: .canceled), let movieOutput = movieOutput else { + completionCallback?() + self.movieOutput = nil + return + } + print("cancel writing") + movieOutput.cancelRecording(sync: true) { + completionCallback?() + } + self.movieOutput = nil + } + + func _stopCaching() { + guard _tryTransitingState(to: .idle) else { return } + print("stop caching") + _cleanBufferCaches() + } + + func _cleanBufferCaches() { + print("Clean all buffers framebufferCache:\(framebufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + sharedImageProcessingContext.runOperationSynchronously { + self.framebufferCache.forEach { $0.unlock() } + self.framebufferCache.removeAll() + self.videoSampleBufferCache.removeAllObjects() + self.audioSampleBufferCache.removeAll() + } + } + + func _cacheFramebuffer(_ framebuffer: Framebuffer) { + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("Cannot get timestamp from framebuffer, dropping frame") + return + } + framebufferCache.append(framebuffer) + while let firstBufferTime = framebufferCache.first?.timingStyle.timestamp?.asCMTime, CMTimeSubtract(frameTime, firstBufferTime).seconds > cacheBuffersDuration { +// debugPrint("dropping oldest video framebuffer time:\(firstBufferTime.seconds)") + _ = framebufferCache.removeFirst() + } + } + + func _writeFramebuffers() { + guard state == .writing else { return } + var appendedBufferCount = 0 + for framebuffer in framebufferCache { + guard movieOutput?._processFramebuffer(framebuffer) == true else { break } + appendedBufferCount += 1 + framebuffer.unlock() + // NOTE: don't occupy too much GPU time, if it is already accumulate lots of framebuffer. + // So that it can reduce frame drop and video frames brightness flashing. 
+ guard sharedImageProcessingContext.alreadyExecuteTime < 1.0 / 40.0 else { break } + } + framebufferCache.removeFirst(appendedBufferCount) + } + + func _cacheAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + audioSampleBufferCache.append(sampleBuffer) + while let firstBuffer = audioSampleBufferCache.first, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer)).seconds > cacheBuffersDuration { +// debugPrint("dropping oldest audio buffer time:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer)).seconds))") + _ = audioSampleBufferCache.removeFirst() + } + } + + func _writeAudioSampleBuffers(_ shouldInvalidateSampleWhenDone: Bool) { + guard state == .writing else { return } + var appendedBufferCount = 0 + for audioBuffer in audioSampleBufferCache { + // debugPrint("[Caching] appending audio buffer \(i+1)/\(self.audioSampleBufferCache.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") + guard movieOutput?._processAudioSampleBuffer(audioBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) == true else { break } + appendedBufferCount += 1 + } + audioSampleBufferCache.removeFirst(appendedBufferCount) + } + + func _cacheVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + videoSampleBufferCache.add(sampleBuffer) + // debugPrint("[Caching] cache new video sample buffer at:\(frameTime.seconds)") + if videoSampleBufferCache.count >= 13 { + // Be careful of caching too much sample buffers from camera captureOutput. iOS has a hard limit of camera buffer count: 15. + // debugPrint("WARNING: almost reach system buffer limit: \(self.videoSampleBufferCache.count)/15") + } + while let firstBuffer = videoSampleBufferCache.firstObject, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer as! 
CMSampleBuffer)).seconds > cacheBuffersDuration { +// debugPrint("dropping oldest video buffer time:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer).seconds)") + videoSampleBufferCache.removeObject(at: 0) + } + } + + private func _writeVideoSampleBuffers(_ shouldInvalidateSampleWhenDone: Bool) { + guard state == .writing else { return } + var appendedBufferCount = 0 + // Drain all cached buffers at first + for sampleBufferObject in videoSampleBufferCache { + let sampleBuffer = sampleBufferObject as! CMSampleBuffer + guard movieOutput?._processVideoSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) == true else { break } + appendedBufferCount += 1 + } + videoSampleBufferCache.removeObjects(in: NSRange(0.. Void)? = nil) { + public func startRecording(sync: Bool = false, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { // Don't do this work on the movieProcessingContext queue so we don't block it. // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. 
@@ -242,6 +235,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { throw MovieOutputError.startWritingError(assetWriterError: self.assetWriter.error) } + // NOTE: pixelBufferPool is not multi-thread safe, and it will be accessed in another thread in order to improve the performance + self.pixelBufferPoolSemaphore.wait() + defer { + self.pixelBufferPoolSemaphore.signal() + } guard self.assetWriterPixelBufferInput.pixelBufferPool != nil else { /* When the pixelBufferPool returns nil, check the following: @@ -254,12 +252,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { throw MovieOutputError.pixelBufferPoolNilError } - if !manualControlState { - self.state = .writing - } else { - self.state = .idle - } - print("MovieOutput started writing") completionCallback?(true, nil) @@ -273,38 +265,23 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } if sync { - _writerSync(operation: block) + Self.movieProcessingContext.runOperationSynchronously(block) } else { - _writerAsync(operation: block) + Self.movieProcessingContext.runOperationAsynchronously(block) } } - public func startCachingWithoutWriting(duration: TimeInterval) { - print("MovieOutput starting caching. duration:\(duration)") - state = .caching - cacheBuffersDuration = duration - } - - public func finishCachingAndStartWriting() { - print("MovieOutput finish caching and start writing. cached buffer: videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") - state = .writing - } - - public func finishRecording(_ completionCallback:(() -> Void)? = nil) { + public func finishRecording(sync: Bool = false, _ completionCallback:(() -> Void)? 
= nil) { print("MovieOutput start finishing writing, optimizeForNetworkUse:\(assetWriter.shouldOptimizeForNetworkUse)") - _writerAsync { [weak self] in - self?._cleanBufferCaches(shouldAppend: true) - guard let self = self, self.state == .writing, - self.assetWriter.status == .writing else { - completionCallback?() - return + let block = { + guard self.assetWriter.status == .writing else { + completionCallback?() + return } self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true - self.state = .finished - self.assetWriterAudioInput?.markAsFinished() self.assetWriterVideoInput.markAsFinished() @@ -313,6 +290,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Documentation: "You do not need to call this method; if you call finishWriting without // calling this method, the session's effective end time will be the latest end timestamp of // the session's samples (that is, no samples will be edited out at the end)." + print("MovieOutput start endSession") self.assetWriter.endSession(atSourceTime: lastFrame) } @@ -320,217 +298,144 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.recordedDuration = lastFrame - startFrame } print("MovieOutput did start finishing writing. Total frames appended video::\(self.totalVideoFramesAppended) audio:\(self.totalAudioFramesAppended)") + // Calling "finishWriting(AVAssetWriter A)" then "startWriting(AVAssetWriter B)" at the same time, + // will cause NSInternalInconsistencyException with error code 0. + // So we need to make sure these two methods will not run at the same time. 
+ let dispatchGroup = DispatchGroup() + dispatchGroup.enter() self.assetWriter.finishWriting { print("MovieOutput did finish writing") + dispatchGroup.leave() completionCallback?() } + dispatchGroup.wait() + } + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) } } - public func cancelRecording(_ completionCallback:(() -> Void)? = nil) { - _writerAsync { [weak self] in - self?._cleanBufferCaches(shouldAppend: false) - guard let self = self else { - completionCallback?() - return - } - self.state = .canceled + public func cancelRecording(sync: Bool = false, _ completionCallback:(() -> Void)? = nil) { + let block = { self.audioEncodingIsFinished = true self.videoEncodingIsFinished = true print("MovieOutput cancel writing, state:\(self.assetWriter.status.rawValue)") if self.assetWriter.status == .writing { + self.pixelBufferPoolSemaphore.wait() self.assetWriter.cancelWriting() + self.pixelBufferPoolSemaphore.signal() } completionCallback?() } - } - - private func _shouldProcessVideoBuffer() -> Bool { - guard state == .caching || state == .writing, assetWriter.status == .writing, !videoEncodingIsFinished else { - print("Guard fell through, dropping video frame. 
state:\(self.state) writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") - return false - } - return true - } - - private func _cleanBufferCaches(shouldAppend: Bool) { - print("[Caching] Drain all buffers videoPixelBuffers:\(videoPixelBufferCache.count) audioSampleBuffer:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") - Self.movieProcessingContext.runOperationSynchronously { - if shouldAppend { - self._appendPixelBuffersFromCache() - self._appendAudioBuffersFromCache() - self._appendVideoSampleBuffersFromCache() - } - self.videoPixelBufferCache.removeAll() - self.videoSampleBufferCache.removeAllObjects() - self.audioSampleBufferCache.removeAll() + if sync { + Self.movieProcessingContext.runOperationSynchronously(block) + } else { + Self.movieProcessingContext.runOperationAsynchronously(block) } } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish() - if previousFrameTime == nil && videoSampleBufferCache.count <= 0 && videoPixelBufferCache.isEmpty && (state == .caching || state == .writing) { + if previousFrameTime == nil { debugPrint("starting process new framebuffer when previousFrameTime == nil") } let work = { [weak self] in - if self?.state == .caching { - self?._renderAndCache(framebuffer: framebuffer) - } else { - self?._processPixelBufferCache(framebuffer: framebuffer) - } - self?.isProcessing = false + _ = self?._processFramebuffer(framebuffer) sharedImageProcessingContext.runOperationAsynchronously { framebuffer.unlock() } } - if encodingLiveVideo { - // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que + // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext queue, // so we can decrease the risk of frames being dropped by the camera. 
I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. - guard !isProcessing else { - framebuffer.unlock() - return - } - isProcessing = true - Self.movieProcessingContext.runOperationAsynchronously(work) + sharedImageProcessingContext.runOperationAsynchronously(work) } else { // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. - Self.movieProcessingContext.runOperationSynchronously(work) + sharedImageProcessingContext.runOperationSynchronously(work) } } - private func _renderAndCache(framebuffer: Framebuffer) { - // Discard first n frames - if dropFirstFrames > 0 { - dropFirstFrames -= 1 - print("Drop one frame. Left dropFirstFrames:\(dropFirstFrames)") - return - } - guard _shouldProcessVideoBuffer() else { return } - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { - print("Cannot get timestamp from framebuffer, dropping frame") - return - } - pixelBuffer = nil - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) - guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { - print("[Caching] WARNING: Unable to create pixel buffer, dropping frame") - return + func _processFramebuffer(_ framebuffer: Framebuffer) -> Bool { + guard assetWriter.status == .writing, !videoEncodingIsFinished else { + print("Guard fell through, dropping video frame. 
writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + return false } do { - try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) - videoPixelBufferCache.append((pixelBuffer!, frameTime)) -// print("[Caching] appended new buffer at:\(frameTime.seconds)") - while let firstBufferTime = videoPixelBufferCache.first?.1, CMTimeSubtract(frameTime, firstBufferTime).seconds > cacheBuffersDuration { - _ = videoPixelBufferCache.removeFirst() -// print("[Caching] caching video duration reach up to:\(cacheBuffersDuration) dropped frame at:\(firstBuffer.1.seconds)") + // Ignore still images and other non-video updates (do I still need this?) + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("Cannot get timestamp from framebuffer, dropping frame") + return false } - } catch { - print("[Caching] WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") - } - } - - private func _processPixelBufferCache(framebuffer: Framebuffer) { - if previousFrameTime == nil { - debugPrint("Got a new framebuffer when previousFrameTime is nil") - } - // Discard first n frames - if dropFirstFrames > 0 { - dropFirstFrames -= 1 - print("Drop one frame. Left dropFirstFrames:\(dropFirstFrames)") - return - } - - guard _shouldProcessVideoBuffer() else { return } - - // Ignore still images and other non-video updates (do I still need this?) - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { - print("Cannot get timestamp from framebuffer, dropping frame") - return - } - - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard (frameTime != previousFrameTime) else { - print("WARNING: frameTime is as same as previousFrameTIme") - return - } - - if (previousFrameTime == nil) { - // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
- let startFrameTime = videoPixelBufferCache.first?.1 ?? frameTime - assetWriter.startSession(atSourceTime: startFrameTime) - self.startFrameTime = startFrameTime - print("did start writing at:\(startFrameTime.seconds)") - delegate?.movieOutputDidStartWriting(self, at: startFrameTime) - } - - previousFrameTime = frameTime - - guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { - print("WARNING: Had to drop a frame at time \(frameTime)") - return - } - - while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { - print("Video waiting...") - // Better to poll isReadyForMoreMediaData often since when it does become true - // we don't want to risk letting framebuffers pile up in between poll intervals. - usleep(100000) // 0.1 seconds - if markIsFinishedAfterProcessing { - print("set videoEncodingIsFinished to true after processing") - markIsFinishedAfterProcessing = false - videoEncodingIsFinished = true + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. 
+ guard frameTime != previousFrameTime else { + print("WARNING: frameTime is as same as previousFrameTIme") + return true } - } - - if !videoPixelBufferCache.isEmpty { - // If videoPixelBufferCache has too much buffers, processing current buffers to ease the burden of videoInput, or it will crash - _appendPixelBuffersFromCache() - _renderAndCache(framebuffer: framebuffer) - _appendPixelBuffersFromCache() - } else { - _renderAndCache(framebuffer: framebuffer) - _appendPixelBuffersFromCache() - } - } - - private func _appendPixelBuffersFromCache() { - guard state == .writing, assetWriter.status == .writing else { return } - var appendedBufferCount = 0 - do { - // Drain all cached buffers at first - if !videoPixelBufferCache.isEmpty { - for (_, (buffer, time)) in videoPixelBufferCache.enumerated() { - // debugPrint("appending video pixel buffer \(i+1)/\(self.videoPixelBufferCache.count) at:\(time.seconds)") - if !assetWriterVideoInput.isReadyForMoreMediaData { - // Avoid error when calling bufferInput.append - print("WARNING: video input is not ready at time: \(time))") - break - } - let bufferInput = assetWriterPixelBufferInput - var appendResult = false - // NOTE: when NSException was triggered within NSObject.catchException, the object inside the block seems cannot be released correctly, so be careful not to trigger error, or directly use "self." - try NSObject.catchException { - appendResult = bufferInput.append(buffer, withPresentationTime: time) - } - if !appendResult { - print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") - break - } - appendedBufferCount += 1 - self.totalVideoFramesAppended += 1 + + if previousFrameTime == nil { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
+ assetWriter.startSession(atSourceTime: frameTime) + startFrameTime = frameTime + print("did start writing at:\(frameTime.seconds)") + delegate?.movieOutputDidStartWriting(self, at: frameTime) + } + previousFrameTime = frameTime + + pixelBuffer = nil + pixelBufferPoolSemaphore.wait() + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) + pixelBufferPoolSemaphore.signal() + guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { + print("WARNING: Unable to create pixel buffer, dropping frame") + return false + } + try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) + guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { + print("WARNING: Had to drop a frame at time \(frameTime)") + return false + } + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + synchronizedEncodingDebugPrint("Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. + usleep(100000) // 0.1 seconds + if markIsFinishedAfterProcessing { + synchronizedEncodingDebugPrint("set videoEncodingIsFinished to true after processing") + markIsFinishedAfterProcessing = false + videoEncodingIsFinished = true } } + let bufferInput = assetWriterPixelBufferInput + var appendResult = false + synchronizedEncodingDebugPrint("appending video framebuffer at:\(frameTime.seconds)") + // NOTE: when NSException was triggered within NSObject.catchException, the object inside the block seems cannot be released correctly, so be careful not to trigger error, or directly use "self." 
+ try NSObject.catchException { + appendResult = bufferInput.append(self.pixelBuffer!, withPresentationTime: frameTime) + } + if !appendResult { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + return false + } + totalVideoFramesAppended += 1 + + if videoEncodingIsFinished { + assetWriterVideoInput.markAsFinished() + } + + return true } catch { print("WARNING: Trouble appending pixel buffer \(error)") + return false } - videoPixelBufferCache.removeFirst(appendedBufferCount) } func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { @@ -544,21 +449,21 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let bufferSize = GLSize(self.size) var cachedTextureRef:CVOpenGLESTexture? = nil - let ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, Self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) if ret != kCVReturnSuccess { print("ret error: \(ret), pixelBuffer: \(pixelBuffer)") return } let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
- renderFramebuffer = try Framebuffer(context:Self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:Self.movieProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: Self.movieProcessingContext) + renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) - if Self.movieProcessingContext.supportsTextureCaches() { + if sharedImageProcessingContext.supportsTextureCaches() { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) @@ -568,114 +473,90 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // MARK: Append buffer directly from CMSampleBuffer public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { - let cache = { [weak self] in - guard let self = self else { return } - guard self._shouldProcessVideoBuffer() else { return } - - let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - - self.videoSampleBufferCache.add(sampleBuffer) -// debugPrint("[Caching] cache new video sample buffer at:\(frameTime.seconds)") - if self.videoSampleBufferCache.count >= 13 && 
self.encodingLiveVideo { - // Be careful of caching too much sample buffers from camera captureOutput. iOS has a hard limit of camera buffer count: 15. -// debugPrint("WARNING: almost reach system buffer limit: \(self.videoSampleBufferCache.count)/15") - } - while let firstBuffer = self.videoSampleBufferCache.firstObject, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer)).seconds > self.cacheBuffersDuration { - self.videoSampleBufferCache.removeObject(at: 0) -// debugPrint("[Caching] caching video duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer as! CMSampleBuffer).seconds)") - } - } - let work = { [weak self] in - defer { - if(shouldInvalidateSampleWhenDone) { - CMSampleBufferInvalidate(sampleBuffer) - } - } - guard let self = self else { return } - guard self._shouldProcessVideoBuffer() else { return } - - let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard (frameTime != self.previousFrameTime) else { - print("Cannot get timestamp from framebuffer, dropping frame") - return - } - - self.videoSampleBufferCache.add(sampleBuffer) - - if (self.previousFrameTime == nil) { - // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. - let startFrameTime = self.videoSampleBufferCache.firstObject.map { CMSampleBufferGetPresentationTimeStamp($0 as! CMSampleBuffer) } ?? 
frameTime - self.assetWriter.startSession(atSourceTime: startFrameTime) - self.startFrameTime = startFrameTime - self.delegate?.movieOutputDidStartWriting(self, at: startFrameTime) - } - - self.previousFrameTime = frameTime - - guard (self.assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { - print("Had to drop a frame at time \(frameTime)") - return - } - - while(!self.assetWriterVideoInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.videoEncodingIsFinished) { - print("Video waiting...") - // Better to poll isReadyForMoreMediaData often since when it does become true - // we don't want to risk letting framebuffers pile up in between poll intervals. - usleep(100000) // 0.1 seconds - } - self._appendVideoSampleBuffersFromCache() + _ = self?._processVideoSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) } if encodingLiveVideo { - Self.movieProcessingContext.runOperationSynchronously(state == .caching ? cache : work) + Self.movieProcessingContext.runOperationSynchronously(work) } else { - (state == .caching ? cache : work)() + work() } } - private func _appendVideoSampleBuffersFromCache() { - guard state == .writing, assetWriter.status == .writing else { return } - var appendedBufferCount = 0 - var time: CMTime = .zero + func _processVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) -> Bool { + defer { + if shouldInvalidateSampleWhenDone { + CMSampleBufferInvalidate(sampleBuffer) + } + } + + guard assetWriter.status == .writing, !videoEncodingIsFinished else { + print("Guard fell through, dropping video frame. writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + return false + } + + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. 
+ guard (frameTime != previousFrameTime) else { + print("Cannot get timestamp from framebuffer, dropping frame") + return false + } + + if previousFrameTime == nil { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + let startFrameTime = frameTime + assetWriter.startSession(atSourceTime: startFrameTime) + self.startFrameTime = startFrameTime + delegate?.movieOutputDidStartWriting(self, at: startFrameTime) + } + + previousFrameTime = frameTime + + guard (assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { + print("Had to drop a frame at time \(frameTime)") + return false + } + + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + self.synchronizedEncodingDebugPrint("Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. + usleep(100000) // 0.1 seconds + } + let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + synchronizedEncodingDebugPrint("appending video sample buffer at:\(time.seconds)") + guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") + return false + } + if !assetWriterVideoInput.isReadyForMoreMediaData { + print("WARNING: video input is not ready at time: \(time))") + return false + } + if let ciFilter = ciFilter { + let originalImage = CIImage(cvPixelBuffer: buffer) + if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { + ciContext.render(outputImage, to: buffer) + } + } + let bufferInput = assetWriterPixelBufferInput do { - // Drain all cached buffers at first - for (i, sampleBufferObject) in self.videoSampleBufferCache.enumerated() { - let sampleBuffer = sampleBufferObject as! 
CMSampleBuffer - time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - debugPrint("appending video sample buffer \(i+1)/\(self.videoSampleBufferCache.count) at:\(time.seconds)") - guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") - break - } - if !self.assetWriterVideoInput.isReadyForMoreMediaData { - print("WARNING: video input is not ready at time: \(time))") - break - } - if let ciFilter = ciFilter { - let originalImage = CIImage(cvPixelBuffer: buffer) - if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { - ciContext.render(outputImage, to: buffer) - } - } - let bufferInput = self.assetWriterPixelBufferInput - var appendResult = false - try NSObject.catchException { - appendResult = bufferInput.append(buffer, withPresentationTime: time) - } - if (!appendResult) { - print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: self.assetWriter.error))") - break - } - appendedBufferCount += 1 - self.totalVideoFramesAppended += 1 + var appendResult = false + try NSObject.catchException { + appendResult = bufferInput.append(buffer, withPresentationTime: time) + } + if (!appendResult) { + print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: assetWriter.error))") + return false } + totalVideoFramesAppended += 1 } catch { print("WARNING: Trouble appending video sample buffer at time: \(time) \(error)") + return false } - self.videoSampleBufferCache.removeObjects(in: NSRange(0.. 
Bool { - guard state == .caching || state == .writing, assetWriter.status == .writing, !audioEncodingIsFinished else { - print("Guard fell through, dropping audio sample, state:\(self.state) writer.state:\(self.assetWriter.status.rawValue) audioEncodingIsFinished:\(self.audioEncodingIsFinished)") - return false + public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + let work = { [weak self] in + _ = self?._processAudioSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + } + if encodingLiveVideo { + Self.movieProcessingContext.runOperationAsynchronously(work) + } else { + work() } - return true } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { - shouldInvalidateAudioSampleWhenDone = shouldInvalidateSampleWhenDone - let cache = { [weak self] in - guard let self = self else { return } - guard self._shouldProcessAudioBuffer() else { - if shouldInvalidateSampleWhenDone { - CMSampleBufferInvalidate(sampleBuffer) - } - return - } - let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - self.audioSampleBufferCache.append(sampleBuffer) - while let firstBuffer = self.audioSampleBufferCache.first, CMTimeSubtract(frameTime, CMSampleBufferGetPresentationTimeStamp(firstBuffer)).seconds > self.cacheBuffersDuration { - _ = self.audioSampleBufferCache.removeFirst() -// debugPrint("[Caching] caching audio duration reach up to:\(self.cacheBuffersDuration) dropped frame at:\(CMSampleBufferGetPresentationTimeStamp(firstBuffer).seconds)") - } + func _processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) -> Bool { + guard assetWriter.status == .writing, !audioEncodingIsFinished, let audioInput = assetWriterAudioInput else { + print("Guard fell through, dropping audio sample, writer.state:\(assetWriter.status.rawValue) audioEncodingIsFinished:\(audioEncodingIsFinished)") + return false } - 
let work = { [weak self] in - guard let self = self else { return } - guard self._shouldProcessAudioBuffer(), let assetWriterAudioInput = self.assetWriterAudioInput else { - if shouldInvalidateSampleWhenDone { - CMSampleBufferInvalidate(sampleBuffer) - } - return - } - - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - self.audioSampleBufferCache.append(sampleBuffer) - - guard (assetWriterAudioInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { + // Always accept audio buffer and cache it at first, since video frame might delay a bit + audioSampleBufferCache.append(sampleBuffer) + guard previousFrameTime != nil else { + print("Process audio sample but first video frame is not ready yet. Time:\(CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer).seconds)") + return true + } + + var processedBufferCount = 0 + for audioBuffer in audioSampleBufferCache { + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer) + guard audioInput.isReadyForMoreMediaData || shouldWaitForEncoding else { print("Had to delay a audio sample at time \(currentSampleTime)") - return + break } - while(!assetWriterAudioInput.isReadyForMoreMediaData && self.shouldWaitForEncoding && !self.audioEncodingIsFinished) { + while !audioInput.isReadyForMoreMediaData && shouldWaitForEncoding && !audioEncodingIsFinished { print("Audio waiting...") usleep(100000) - if !assetWriterAudioInput.isReadyForMoreMediaData { - print("Audio still not ready, skip this runloop...") - return - } - } - - guard self.previousFrameTime != nil else { - print("Add audio sample to pending queue but first video frame is not ready yet. Time:\(CMTimeGetSeconds(currentSampleTime))") - return - } - - self.synchronizedEncodingDebugPrint("Process audio sample output. 
Time:\(CMTimeGetSeconds(currentSampleTime))") - self._appendAudioBuffersFromCache() - } - - if encodingLiveVideo { - Self.movieProcessingContext.runOperationAsynchronously(state == .caching ? cache : work) - } else { - (state == .caching ? cache : work)() - } - } - - private func _appendAudioBuffersFromCache() { - guard let audioInput = assetWriterAudioInput, state == .writing, assetWriter.status == .writing else { return } - var appendedBufferCount = 0 - do { - for (_, audioBuffer) in audioSampleBufferCache.enumerated() { - // debugPrint("[Caching] appending audio buffer \(i+1)/\(self.audioSampleBufferCache.count) at:\(CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer).seconds)") if !audioInput.isReadyForMoreMediaData { - print("WARNING: audio input is not ready at: \(CMSampleBufferGetPresentationTimeStamp(audioBuffer).seconds)") + synchronizedEncodingDebugPrint("Audio still not ready, skip this runloop...") break } + } + + synchronizedEncodingDebugPrint("Process audio sample output. 
Time:\(currentSampleTime.seconds)") + do { var appendResult = false try NSObject.catchException { appendResult = audioInput.append(audioBuffer) @@ -780,22 +626,24 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") break } - appendedBufferCount += 1 totalAudioFramesAppended += 1 - if shouldInvalidateAudioSampleWhenDone { + if shouldInvalidateSampleWhenDone { CMSampleBufferInvalidate(audioBuffer) } + processedBufferCount += 1 + } + catch { + print("WARNING: Trouble appending audio sample buffer: \(error)") + break } } - catch { - print("WARNING: Trouble appending audio sample buffer: \(error)") - } - self.audioSampleBufferCache.removeFirst(appendedBufferCount) + audioSampleBufferCache.removeFirst(processedBufferCount) + return true } public func flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: Bool) { guard let lastBuffer = audioSampleBufferCache.popLast() else { return } - processAudioBuffer(lastBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) + _ = _processAudioSampleBuffer(lastBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) } // Note: This is not used for synchronized encoding, only live video. 
@@ -806,18 +654,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { func synchronizedEncodingDebugPrint(_ string: String) { if(synchronizedEncodingDebug && !encodingLiveVideo) { print(string) } } - - private func _writerAsync(operation: @escaping () -> Void) { - MovieOutput.assetWriterQueue.async { - Self.movieProcessingContext.runOperationSynchronously(operation) - } - } - - private func _writerSync(operation: @escaping () -> Void) { - MovieOutput.assetWriterQueue.sync { - Self.movieProcessingContext.runOperationSynchronously(operation) - } - } } From 6fc2919e34bc2f25f4487b2c031a33e309c33489 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 7 May 2020 15:12:01 +0800 Subject: [PATCH 241/332] fix: try catch possible crash --- framework/Source/iOS/MovieInput.swift | 55 ++++++++++++++------------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 69f1c62e..14104931 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -180,35 +180,38 @@ public class MovieInput: ImageSource { let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let assetReader = try AVAssetReader.init(asset: self.asset) + let assetReader = try AVAssetReader(asset: self.asset) - if(self.videoComposition == nil) { - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings:outputSettings) - readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader.add(readerVideoTrackOutput) - } - else { - let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: .video), videoSettings: outputSettings) - readerVideoTrackOutput.videoComposition = self.videoComposition - readerVideoTrackOutput.alwaysCopiesSampleData = false - 
assetReader.add(readerVideoTrackOutput) - } - - if let audioTrack = self.asset.tracks(withMediaType: .audio).first, - let _ = self.audioEncodingTarget { - let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings) - readerAudioTrackOutput.alwaysCopiesSampleData = false - assetReader.add(readerAudioTrackOutput) - } - - self.startTime = self.requestedStartTime - if let startTime = self.requestedStartTime ?? self.trimmedStartTime { - if let trimmedDuration = self.trimmedDuration, trimmedDuration.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= asset.duration { - assetReader.timeRange = CMTimeRange(start: startTime, duration: trimmedDuration) - } else { - assetReader.timeRange = CMTimeRange(start: startTime, duration: .positiveInfinity) + try NSObject.catchException { + if(self.videoComposition == nil) { + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings:outputSettings) + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerVideoTrackOutput) + } + else { + let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: .video), videoSettings: outputSettings) + readerVideoTrackOutput.videoComposition = self.videoComposition + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerVideoTrackOutput) + } + + if let audioTrack = self.asset.tracks(withMediaType: .audio).first, + let _ = self.audioEncodingTarget { + let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: self.audioSettings) + readerAudioTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerAudioTrackOutput) + } + + self.startTime = self.requestedStartTime + if let startTime = self.requestedStartTime ?? 
self.trimmedStartTime { + if let trimmedDuration = self.trimmedDuration, trimmedDuration.seconds > 0, CMTimeAdd(startTime, trimmedDuration) <= self.asset.duration { + assetReader.timeRange = CMTimeRange(start: startTime, duration: trimmedDuration) + } else { + assetReader.timeRange = CMTimeRange(start: startTime, duration: .positiveInfinity) + } } } + self.requestedStartTime = nil self.currentTime = nil self.actualStartTime = nil From 2693f4a2ae0170a25c80d0b11cf2e1d2b9dc5751 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 13 May 2020 19:38:22 +0800 Subject: [PATCH 242/332] fix: add missing commit --- framework/Source/iOS/MovieCache.swift | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 82b4694a..b2c7e764 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -65,9 +65,9 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } } - public func stopCaching() { + public func stopCaching(needsCancel: Bool = false) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?._stopCaching() + self?._stopCaching(needsCancel: needsCancel) } } } @@ -183,7 +183,10 @@ private extension MovieCache { self.movieOutput = nil } - func _stopCaching() { + func _stopCaching(needsCancel: Bool) { + if needsCancel && state == .writing { + _cancelWriting() + } guard _tryTransitingState(to: .idle) else { return } print("stop caching") _cleanBufferCaches() From 5c486073b4eb1084feb06cf748a3b83103e09b48 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 14 May 2020 19:14:53 +0800 Subject: [PATCH 243/332] improve(MovieCaceh): improve new state from stoped to cancel --- framework/Source/iOS/MovieCache.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index b2c7e764..bb7c41fc 100644 --- 
a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -115,7 +115,7 @@ private extension MovieCache { (.idle, .caching), (.idle, .writing), (.idle, .canceled), (.caching, .writing), (.caching, .stopped), (.caching, .canceled), (.caching, .idle), (.writing, .stopped), (.writing, .canceled), - (.stopped, .idle), (.stopped, .writing), + (.stopped, .idle), (.stopped, .writing), (.stopped, .canceled), (.canceled, .idle), (.canceled, .writing): debugPrint("state transite from:\(state) to:\(newState)") state = newState @@ -135,6 +135,7 @@ private extension MovieCache { func _setMovieOutput(_ movieOutput: MovieOutput) { guard state != .writing || self.movieOutput == nil else { + print("Should not set MovieOutput during writing") assertionFailure("Should not set MovieOutput during writing") return } From 3863152914d5fc82c4a6fb2b6f3498905f16fe9b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 22 May 2020 19:28:44 +0800 Subject: [PATCH 244/332] improve(buffercache): avoid duplicated add buffer to cache --- framework/Source/Framebuffer.swift | 11 ++++++++++- framework/Source/FramebufferCache.swift | 12 ++++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index e7687ae7..b0d6a213 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -43,7 +43,7 @@ public enum FramebufferTimingStyle { } } -public class Framebuffer { +public class Framebuffer: Hashable { public var timingStyle:FramebufferTimingStyle = .stillImage public var orientation:ImageOrientation public var userInfo:[AnyHashable:Any]? 
@@ -58,6 +58,7 @@ public class Framebuffer { let hash:Int64 let textureOverride:Bool + let id = UUID().uuidString unowned var context:OpenGLContext @@ -188,6 +189,14 @@ public class Framebuffer { cache?.returnToCache(self) } } + + public static func == (lhs: Framebuffer, rhs: Framebuffer) -> Bool { + return lhs.id == rhs.id + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(id) + } } func hashForFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Int64 { diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index fcc83d13..8627daa3 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -15,7 +15,7 @@ // TODO: Add mechanism to purge framebuffers on low memory public class FramebufferCache { - var framebufferCache = [Int64:[Framebuffer]]() + var framebufferCache = [Int64:Set]() let context:OpenGLContext init(context:OpenGLContext) { @@ -32,7 +32,7 @@ public class FramebufferCache { if ((framebufferCache[hash]?.count ?? 
-1) > 0) { //print("Restoring previous framebuffer") - framebuffer = framebufferCache[hash]!.removeLast() + framebuffer = framebufferCache[hash]!.removeFirst() framebuffer.orientation = orientation } else { do { @@ -55,9 +55,13 @@ public class FramebufferCache { //sprint("Returning to cache: \(framebuffer)") context.runOperationSynchronously{ if (self.framebufferCache[framebuffer.hash] != nil) { - self.framebufferCache[framebuffer.hash]!.append(framebuffer) + if self.framebufferCache[framebuffer.hash]!.contains(framebuffer) { + print("WARNING: add duplicated buffer to cache.") + } else { + self.framebufferCache[framebuffer.hash]!.insert(framebuffer) + } } else { - self.framebufferCache[framebuffer.hash] = [framebuffer] + self.framebufferCache[framebuffer.hash] = Set([framebuffer]) } } } From 3a10a6aa0f4fdcf2fee062983c022edc49bd4a29 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Fri, 29 May 2020 12:36:16 +0800 Subject: [PATCH 245/332] fix(transcoding): fix audio encoding not cancel after reader cancel. --- framework/Source/iOS/MovieInput.swift | 1 + framework/Source/iOS/MovieOutput.swift | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 14104931..efa2d699 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -146,6 +146,7 @@ public class MovieInput: ImageSource { public func cancel() { self.currentThread?.cancel() self.currentThread = nil + (self.audioEncodingTarget as? 
MovieOutput)?.cancelRecodingImmediately() synchronizedEncodingDebugPrint("MovieInput cancel") } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index dcf28690..6cccead2 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -336,6 +336,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + public func cancelRecodingImmediately() { + self.audioEncodingIsFinished = true + self.videoEncodingIsFinished = true + } + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish() From 596f87a7baef6db05f6de1feede80f084d1d0220 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 3 Jun 2020 18:03:21 +0800 Subject: [PATCH 246/332] fix(framebuffer): fix random crash when force unwrap weak value --- framework/Source/Pipeline.swift | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 447fef2e..c5d3e18a 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -175,30 +175,36 @@ public class TargetContainer:Sequence { return nil } - while (self.targets[index].value == nil) { + // NOTE: strong retain value, in case the value is released on another thread + var retainedValue = self.targets[index].value + while retainedValue == nil { self.targets.remove(at:index) if (index >= self.targets.count) { return nil } + retainedValue = self.targets[index].value } index += 1 - return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget) + return (retainedValue!, self.targets[index - 1].indexAtTarget) #else return self.dispatchQueue.sync{ if (index >= self.targets.count) { return nil } - while (self.targets[index].value == nil) { + // NOTE: strong retain value, in case the value is released on another thread + var retainedValue = self.targets[index].value + while retainedValue == nil { 
self.targets.remove(at:index) if (index >= self.targets.count) { return nil } + retainedValue = self.targets[index].value } index += 1 - return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget) + return (retainedValue!, self.targets[index - 1].indexAtTarget) } #endif } From 76688c59edd98ba655d05ef19177056c20317423 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 3 Jun 2020 20:08:37 +0800 Subject: [PATCH 247/332] fix(framebuffer): fix framebuffer cache accessing crash --- framework/Source/FramebufferCache.swift | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 8627daa3..807c6a94 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -23,6 +23,7 @@ public class FramebufferCache { } public func requestFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { + __dispatch_assert_queue(context.serialDispatchQueue) let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer @@ -47,8 +48,16 @@ public class FramebufferCache { return framebuffer } - public func purgeAllUnassignedFramebuffers() { - framebufferCache.removeAll() + public func purgeAllUnassignedFramebuffers(sync: Bool = false) { + if sync { + context.runOperationSynchronously { + self.framebufferCache.removeAll() + } + } else { + context.runOperationAsynchronously { + self.framebufferCache.removeAll() + } + } } func returnToCache(_ 
framebuffer:Framebuffer) { From 59348432f34288206ca4ff30c307df67448dfdf6 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 3 Jun 2020 23:50:40 +0800 Subject: [PATCH 248/332] improve(movieplayer): move copyPixelBuffer to displaylink to avoid copy failed --- framework/Source/iOS/MoviePlayer.swift | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 666304ff..1a3ac3ff 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -88,7 +88,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { var nextSeeking: SeekingInfo? public var isSeeking = false public var enableVideoOutput = false - private var isProcessing = false + public private(set) var isProcessing = false private var needAddItemAfterDidEndNotify = false private lazy var pendingNewItems = [AVPlayerItem]() private var pendingSeekInfo: SeekingInfo? @@ -537,13 +537,7 @@ private extension MoviePlayer { // MARK: - // MARK: Internal processing functions - func _process(videoOutput: AVPlayerItemVideoOutput, at playTime: CMTime) { - var timeForDisplay: CMTime = .zero - guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: &timeForDisplay) else { - print("[MoviePlayer] Failed to copy pixel buffer at time:\(playTime)") - return - } - + func _process(_ pixelBuffer: CVPixelBuffer, at timeForDisplay: CMTime) { // Out of range when looping, skip process. So that it won't show unexpected frames. 
if loop && isPlaying && (timeForDisplay < actualStartTime || timeForDisplay >= actualEndTime) { print("[MoviePlayer] Skipped frame at time:\(timeForDisplay.seconds) is larger than range: [\(actualStartTime.seconds), \(actualEndTime.seconds)]") @@ -588,11 +582,17 @@ private extension MoviePlayer { } guard !isProcessing, videoOutput.hasNewPixelBuffer(forItemTime: playTime) == true else { return } isProcessing = true + var timeForDisplay: CMTime = .zero + guard let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: playTime, itemTimeForDisplay: &timeForDisplay) else { + print("[MoviePlayer] Failed to copy pixel buffer at time:\(playTime)") + isProcessing = false + return + } sharedImageProcessingContext.runOperationAsynchronously { [weak self] in defer { self?.isProcessing = false } - self?._process(videoOutput: videoOutput, at: playTime) + self?._process(pixelBuffer, at: playTime) self?._notifyTimeObserver(with: playTime) } } From 05dc0396abb21311a076200ac9be6f50c7f4fb21 Mon Sep 17 00:00:00 2001 From: RoCry Date: Mon, 8 Jun 2020 21:34:09 +0800 Subject: [PATCH 249/332] feat(picture): support export with alpha for PictureOutput --- framework/Source/iOS/PictureOutput.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index eacf321b..81501a0b 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -14,6 +14,7 @@ public class PictureOutput: ImageConsumer { public var cgImageAvailableCallback:((CGImage) -> ())? public var onlyCaptureNextFrame:Bool = true public var keepImageAroundForSynchronousCapture:Bool = false + public var exportWithAlpha = false var storedFramebuffer:Framebuffer? 
public let sources = SourceContainer() @@ -57,7 +58,8 @@ public class PictureOutput: ImageConsumer { renderFramebuffer.unlock() guard let dataProvider = CGDataProvider(dataInfo:nil, data:data, size:imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not allocate a CGDataProvider")} let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImage(width:Int(framebuffer.size.width), height:Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, bitmapInfo:CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! + let bitmapInfo = exportWithAlpha ? CGBitmapInfo(rawValue: CGImageAlphaInfo.last.rawValue) : CGBitmapInfo() + return CGImage(width:Int(framebuffer.size.width), height:Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, bitmapInfo: bitmapInfo, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! 
} public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { From 99963df93bf539f725a47481544bd01105696873 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 10 Jun 2020 17:13:26 +0800 Subject: [PATCH 250/332] feat: GaussianBlur support luminance threshold --- .../Source/Operations/GaussianBlur.swift | 54 +++++++++++++++---- framework/Source/SerialDispatch.swift | 1 + 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/framework/Source/Operations/GaussianBlur.swift b/framework/Source/Operations/GaussianBlur.swift index bab72ebc..6d3d70c6 100755 --- a/framework/Source/Operations/GaussianBlur.swift +++ b/framework/Source/Operations/GaussianBlur.swift @@ -12,16 +12,35 @@ public class GaussianBlur: TwoStageOperation { sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold))} } } } - public init() { - blurRadiusInPixels = 2.0 - let pixelRadius = pixelRadiusForBlurSigma(round(Double(blurRadiusInPixels))) - let initialShader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:2.0), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, 
sigma:2.0))} + public var luminanceThreshold: Float? = nil { + didSet { + guard luminanceThreshold != oldValue else { return } + uniformSettings["luminanceThreshold"] = luminanceThreshold + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + sharedImageProcessingContext.runOperationAsynchronously { + self.downsamplingFactor = downsamplingFactor + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold))} + } + } + } + + public init(blurRadiusInPixels: Float = 2.0, luminanceThreshold: Float? = nil) { + self.blurRadiusInPixels = blurRadiusInPixels + self.luminanceThreshold = luminanceThreshold + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:false) + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + let initialShader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: luminanceThreshold), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: luminanceThreshold))} super.init(shader:initialShader, numberOfInputs:1) + self.downsamplingFactor = downsamplingFactor + if let luminanceThreshold = luminanceThreshold { + self.uniformSettings["luminanceThreshold"] = luminanceThreshold + } } } @@ -76,7 +95,7 @@ func vertexShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> guard (radius > 0) else { return 
OneInputVertexShader } let numberOfBlurCoordinates = radius * 2 + 1 - var shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(numberOfBlurCoordinates)];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + var shaderString = "varying vec2 textureCoordinate;\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(numberOfBlurCoordinates)];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" for currentBlurCoordinateIndex in 0.. } } - shaderString += "}\n" + shaderString += "textureCoordinate = inputTextureCoordinate.xy;\n}\n" return shaderString } @@ -135,25 +154,33 @@ func optimizedGaussianOffsetsForRadius(_ blurRadius:UInt, sigma:Double) -> [Doub return optimizedOffsets } -func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { +func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, luminanceThreshold: Float? 
= nil) -> String { guard (radius > 0) else { return OneInputVertexShader } let optimizedOffsets = optimizedGaussianOffsetsForRadius(radius, sigma:sigma) let numberOfOptimizedOffsets = optimizedOffsets.count // Header - var shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\((1 + (numberOfOptimizedOffsets * 2)))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + var shaderString: String + if luminanceThreshold != nil { + shaderString = "varying vec2 textureCoordinate;\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\((1 + (numberOfOptimizedOffsets * 2)))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + } else { + shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\((1 + (numberOfOptimizedOffsets * 2)))];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" + } shaderString += "blurCoordinates[0] = inputTextureCoordinate.xy;\n" for currentOptimizedOffset in 0.. String { +func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, luminanceThreshold: Float? 
= nil) -> String { guard (radius > 0) else { return PassthroughFragmentShader } let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) @@ -161,7 +188,12 @@ func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) #if GLES - var shaderString = "uniform sampler2D inputImageTexture;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n \n varying highp vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n lowp vec4 sum = vec4(0.0);\n" + var shaderString: String + if luminanceThreshold != nil { + shaderString = "varying highp vec2 textureCoordinate;\n uniform highp float luminanceThreshold;\n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n uniform sampler2D inputImageTexture;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n \n varying highp vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n highp float luminance = dot(textureColor.rgb, W);\n highp float thresholdResult = step(luminanceThreshold, luminance);\nif (thresholdResult == 0.0) {\ngl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n return;\n } \n lowp vec4 sum = vec4(0.0);\n" + } else { + shaderString = "uniform sampler2D inputImageTexture;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n \n varying highp vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n lowp vec4 sum = vec4(0.0);\n" + } #else var shaderString = "uniform sampler2D inputImageTexture;\n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n vec4 sum = vec4(0.0);\n" #endif diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 555a5265..3b0bd43d 100755 --- 
a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -1,4 +1,5 @@ import Foundation +import AVFoundation #if os(Linux) // For now, disable GCD on Linux and run everything on the main thread From a278f4c0fcbb9ec2c0252bed8d605293ba8852f3 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 10 Jun 2020 18:13:52 +0800 Subject: [PATCH 251/332] feat: add show case entry for soft focus and luminance gaussian blur --- .../FilterShowcase/FilterOperations.swift | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index 7af4b396..ebd082ed 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -2,6 +2,43 @@ import GPUImage import QuartzCore let filterOperations: Array = [ + FilterOperation( + filter:{AlphaBlend()}, + listName:"Highlights Blur", + titleName:"Gaussian Blur Lumi>0.6(alpha)", + sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.8), + sliderUpdateCallback: {(filter, sliderValue) in + filter.mix = sliderValue + }, + filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + let blendFilter = filter as! AlphaBlend + blendFilter.removeAllSources() + + let gaussianBlur = GaussianBlur(blurRadiusInPixels: 10, luminanceThreshold: 0.6) + camera --> blendFilter + camera --> gaussianBlur --> blendFilter --> outputView + return blendFilter + }) + ), + FilterOperation( + filter:{AlphaBlend()}, + listName:"Soft Focus", + titleName:"Gaussian Blur + Alpha Blend", + sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {(filter, sliderValue) in + filter.mix = sliderValue + }, + filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + let blendFilter = filter as! 
AlphaBlend + blendFilter.removeAllSources() + + let gaussianBlur = GaussianBlur(blurRadiusInPixels: 10, luminanceThreshold: 0.6) + gaussianBlur.blurRadiusInPixels = 10 + camera --> blendFilter + camera --> gaussianBlur --> blendFilter --> outputView + return blendFilter + }) + ), FilterOperation ( filter:{SaturationAdjustment()}, listName:"Saturation", From 2b515f6262f9afe4e280aa8b479ac16cad2de4fb Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 11 Jun 2020 01:05:29 +0800 Subject: [PATCH 252/332] chore: update project --- .../FilterShowcase.xcodeproj/project.pbxproj | 6 +- .../xcschemes/FilterShowcase.xcscheme | 78 +++++++++++++++++++ 2 files changed, 81 insertions(+), 3 deletions(-) create mode 100644 examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj index 0dfc5aa4..1c954c57 100644 --- a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj +++ b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj @@ -209,7 +209,7 @@ TargetAttributes = { BC0037B6195CA11B00B9D651 = { CreatedOnToolsVersion = 6.0; - DevelopmentTeam = C9FPMJGKC4; + DevelopmentTeam = BCUYJQB9VH; LastSwiftMigration = 0800; ProvisioningStyle = Automatic; }; @@ -422,7 +422,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - DEVELOPMENT_TEAM = C9FPMJGKC4; + DEVELOPMENT_TEAM = BCUYJQB9VH; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; @@ -437,7 +437,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - DEVELOPMENT_TEAM = C9FPMJGKC4; + DEVELOPMENT_TEAM = BCUYJQB9VH; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; 
IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme new file mode 100644 index 00000000..87d1eb0e --- /dev/null +++ b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/xcshareddata/xcschemes/FilterShowcase.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 9b68b54b635b521342b9958496268b0fa7e032d4 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 11 Jun 2020 01:11:25 +0800 Subject: [PATCH 253/332] chore: disable thread checking in demo app --- .../FilterShowcase/FilterShowcaseSwift/AppDelegate.swift | 5 +++-- framework/Source/Pipeline.swift | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift index d353864b..86437726 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift @@ -1,12 +1,13 @@ import UIKit +import GPUImage @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { var window: UIWindow? - func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject : AnyObject]?) -> Bool { - return true + func applicationDidFinishLaunching(_ application: UIApplication) { + _needCheckFilterContainerThread = false } } diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index c5d3e18a..c1dcfcd7 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -2,6 +2,8 @@ // MARK: Basic types import Foundation +public var _needCheckFilterContainerThread: Bool? 
+ public protocol ImageSource: AnyObject { var _needCheckSourceThread: Bool { get } var targets:TargetContainer { get } @@ -34,7 +36,7 @@ infix operator --> : AdditionPrecedence public extension ImageSource { var _needCheckSourceThread: Bool { - return true + return _needCheckFilterContainerThread ?? true } func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? = nil) { @@ -102,7 +104,7 @@ public extension ImageSource { public extension ImageConsumer { var _needCheckConsumerThread: Bool { - return true + return _needCheckFilterContainerThread ?? true } func addSource(_ source:ImageSource) -> UInt? { From 0a3e4bcefa9371fd74ca0f3524dc1cf57a2505fd Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 1 Jul 2020 11:26:23 +0800 Subject: [PATCH 254/332] fix: fix possible crash when setting shader parameters --- framework/Source/ShaderUniformSettings.swift | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/framework/Source/ShaderUniformSettings.swift b/framework/Source/ShaderUniformSettings.swift index f67aed30..d2835db5 100644 --- a/framework/Source/ShaderUniformSettings.swift +++ b/framework/Source/ShaderUniformSettings.swift @@ -13,7 +13,21 @@ #endif public struct ShaderUniformSettings { - private var uniformValues = [String:Any]() + private static var lock = os_unfair_lock_s() + private var _uniformValues = [String:Any]() + private var uniformValues: [String:Any] { + get { + os_unfair_lock_lock(&Self.lock) + let temp = _uniformValues + os_unfair_lock_unlock(&Self.lock) + return temp + } + set { + os_unfair_lock_lock(&Self.lock) + _uniformValues = newValue + os_unfair_lock_unlock(&Self.lock) + } + } public init() { } @@ -54,7 +68,8 @@ public struct ShaderUniformSettings { } public func restoreShaderSettings(_ shader:ShaderProgram) { - for (uniform, value) in uniformValues { + let finalUniformValues = uniformValues + for (uniform, value) in finalUniformValues { switch value { case let value as Float: shader.setValue(GLfloat(value), 
forUniform:uniform) case let value as Int: shader.setValue(GLint(value), forUniform:uniform) From 45ab56e007581c22842bda924bd3293b9dbe8995 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Mon, 6 Jul 2020 11:50:42 +0800 Subject: [PATCH 255/332] fix(fast-forward): fix pitch issue when fast-forward. --- framework/Source/iOS/MoviePlayer.swift | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 1a3ac3ff..dec6f873 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -151,7 +151,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if enableVideoOutput { _setupPlayerItemVideoOutput(for: item) } - item.audioTimePitchAlgorithm = .varispeed + if item.audioTimePitchAlgorithm == .lowQualityZeroLatency { + item.audioTimePitchAlgorithm = _audioPitchAlgorithm() + } lastPlayerItem = item self.enableVideoOutput = enableVideoOutput _setupPlayerObservers(playerItem: item) @@ -194,7 +196,9 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if enableVideoOutput { _setupPlayerItemVideoOutput(for: item) } - item.audioTimePitchAlgorithm = .varispeed + if item.audioTimePitchAlgorithm == .lowQualityZeroLatency { + item.audioTimePitchAlgorithm = _audioPitchAlgorithm() + } _setupPlayerObservers(playerItem: item) } else { _removePlayerObservers() @@ -421,6 +425,16 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _resetTimeObservers() loop = enabled } + + public func changePlayRate(to rate: Float) { + playrate = rate + items().forEach { + if $0.audioTimePitchAlgorithm == .lowQualityZeroLatency { + $0.audioTimePitchAlgorithm = _audioPitchAlgorithm() + } + } + resume() + } } private extension MoviePlayer { @@ -502,6 +516,12 @@ private extension MoviePlayer { } } + // Both algorithm has the highest quality + // Except .spectral has pitch correction, which is suitable for fast/slow play 
rate + func _audioPitchAlgorithm() -> AVAudioTimePitchAlgorithm { + return abs(playrate - 1.0) < .ulpOfOne ? .varispeed : .spectral + } + func onCurrentItemPlayToEnd() { if loop && isPlaying { start() From 3bf6cdcc46c021a468b8bf1a3f76f040ae2c51e1 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Wed, 8 Jul 2020 21:50:34 +0800 Subject: [PATCH 256/332] fix(fast-forward): fix restart when fast forward. --- framework/Source/iOS/MoviePlayer.swift | 3 +++ 1 file changed, 3 insertions(+) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index dec6f873..4befef1b 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -427,12 +427,15 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func changePlayRate(to rate: Float) { + let ct = currentTime() playrate = rate items().forEach { if $0.audioTimePitchAlgorithm == .lowQualityZeroLatency { $0.audioTimePitchAlgorithm = _audioPitchAlgorithm() } } + let toleranceTime = CMTime(seconds: 0.1, preferredTimescale: 600) + nextSeeking = SeekingInfo(time: ct, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime, shouldPlayAfterSeeking: true) resume() } } From 07bc31e02974fb492c1a9d60dec837a9876d1700 Mon Sep 17 00:00:00 2001 From: RoCry Date: Wed, 8 Jul 2020 19:55:59 +0800 Subject: [PATCH 257/332] feat(pipeline): basic debug print --- .../Source/Operations/LookupFilter.swift | 6 +++ framework/Source/Pipeline.swift | 51 +++++++++++++++++++ framework/Source/iOS/PictureInput.swift | 13 +++-- framework/Source/iOS/RenderView.swift | 5 ++ 4 files changed, 72 insertions(+), 3 deletions(-) diff --git a/framework/Source/Operations/LookupFilter.swift b/framework/Source/Operations/LookupFilter.swift index 1a4996c2..72731839 100755 --- a/framework/Source/Operations/LookupFilter.swift +++ b/framework/Source/Operations/LookupFilter.swift @@ -15,4 +15,10 @@ public class LookupFilter: BasicOperation { ({intensity = 1.0})() } } + 
+extension LookupFilter: DebugPipelineNameable { + public var debugNameForPipeline: String { + "LookupFilter(\(lookupImage?.imageName ?? "null")/\(intensity))" + } +} #endif diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index c1dcfcd7..70af36a2 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -300,6 +300,57 @@ public class ImageRelay: ImageProcessingOperation { } } +public protocol DebugPipelineNameable { + var debugNameForPipeline: String { get } +} + +private func simpleName(_ obj: T) -> String { + if let obj = obj as? DebugPipelineNameable { + return obj.debugNameForPipeline + } + + let origin = String(describing: obj) + return origin.split(separator: ".").last.map { String($0) } ?? origin +} + +extension OperationGroup { + public var debugPipelineDescription: String { + // if group have custom name, do not use relay.description + if let obj = self as? DebugPipelineNameable { + return obj.debugNameForPipeline + } + + return "[\(simpleName(self)) -> \(inputImageRelay.debugPipelineDescription)]" + } +} + +public extension ImageSource { + var debugPipelineDescription: String { + let nextInfos: [String] = targets.map { consumer, _ in + if let c = consumer as? OperationGroup { + return c.debugPipelineDescription + } + + if let c = consumer as? ImageRelay { + return c.debugPipelineDescription + } + + if let c = consumer as? 
ImageSource { + return c.debugPipelineDescription + } + + return simpleName(consumer) + } + let nextInfosText = nextInfos.joined(separator: " -> ") + + if self is ImageRelay { + return nextInfosText + } + + return "\(simpleName(self)) -> \(nextInfosText)" + } +} + #if DEBUG public extension ImageSource { var debugPipelineNext: String { diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 099f0972..f84ea06a 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -26,9 +26,16 @@ public class PictureInput: ImageSource { public let targets = TargetContainer() public private(set) var imageFramebuffer:Framebuffer? public var framebufferUserInfo:[AnyHashable:Any]? + public let imageName: String var hasProcessedImage:Bool = false - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { + public init( + image: CGImage, + imageName: String? = nil, + smoothlyScaleOutput: Bool = false, + orientation: ImageOrientation = .portrait) throws { + self.imageName = imageName ?? 
"CGImage" + let widthOfImage = GLint(image.width) let heightOfImage = GLint(image.height) @@ -137,12 +144,12 @@ public class PictureInput: ImageSource { } public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { - try self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + try self.init(image:image.cgImage!, imageName:"UIImage", smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { guard let image = UIImage(named:imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } - try self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + try self.init(image:image.cgImage!, imageName:imageName, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } deinit { diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index e7043983..e58c25a9 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -222,3 +222,8 @@ public class RenderView:UIView, ImageConsumer { } } +extension RenderView: DebugPipelineNameable { + public var debugNameForPipeline: String { + return "RenderView" + } +} From 06a1c6eec78b3e085e62320b39ab532c34646f29 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 9 Jul 2020 13:01:41 +0800 Subject: [PATCH 258/332] improve(context): expose some context property for PixelBufferOutput --- framework/Source/iOS/OpenGLContext.swift | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index 86b150ad..bcbab83b 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -16,11 +16,11 @@ public class 
OpenGLContext: SerialDispatch { public let context:EAGLContext - lazy var passthroughShader:ShaderProgram = { + public private(set) lazy var passthroughShader:ShaderProgram = { return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} }() - lazy var coreVideoTextureCache:CVOpenGLESTextureCache = { + public private(set) lazy var coreVideoTextureCache:CVOpenGLESTextureCache = { var newTextureCache:CVOpenGLESTextureCache? = nil let err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, self.context, nil, &newTextureCache) return newTextureCache! @@ -80,7 +80,7 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - func supportsTextureCaches() -> Bool { + public func supportsTextureCaches() -> Bool { #if targetEnvironment(simulator) return false // Simulator glitches out on use of texture caches #else From f97b610c24d0b12c4b37d99922260b56659ac773 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 16 Jul 2020 16:46:11 +0800 Subject: [PATCH 259/332] improve(MovieCache): make MovieCache api more reenterable --- framework/Source/iOS/MovieCache.swift | 60 +++++++++++++++------------ 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index bb7c41fc..932a8e4e 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -10,10 +10,13 @@ import AVFoundation public enum MovieCacheError: Error { case invalidState + case sameState case emptyMovieOutput + case movieOutputError(Error) } public class MovieCache: ImageConsumer, AudioEncodingTarget { + public typealias Completion = (Result) -> Void public let sources = SourceContainer() public let maximumInputs: UInt = 1 public private(set) var movieOutput: MovieOutput? 
@@ -47,19 +50,19 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } } - public func startWriting(_ completionCallback:((_ error: Error?) -> Void)? = nil) { + public func startWriting(_ completionCallback: Completion? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._startWriting(completionCallback) } } - public func stopWriting(_ completionCallback:((Error?) -> Void)? = nil) { + public func stopWriting(_ completionCallback: Completion? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._stopWriting(completionCallback) } } - public func cancelWriting(_ completionCallback:(() -> Void)? = nil) { + public func cancelWriting(_ completionCallback: Completion? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._cancelWriting(completionCallback) } @@ -108,8 +111,13 @@ private extension MovieCache { return state != .unknown && state != .idle } - func _tryTransitingState(to newState: State) -> Bool { - guard state != newState else { return true } + func _tryTransitingState(to newState: State, _ errorCallback: Completion? 
= nil) -> Result { + if state == newState { + // NOTE: for same state, just do nothing and callback + print("WARNING: Same state transition for:\(state)") + errorCallback?(.success(true)) + return .failure(.sameState) + } switch (state, newState) { case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), (.idle, .caching), (.idle, .writing), (.idle, .canceled), @@ -119,16 +127,17 @@ private extension MovieCache { (.canceled, .idle), (.canceled, .writing): debugPrint("state transite from:\(state) to:\(newState)") state = newState - return true + return .success(true) default: assertionFailure() print("ERROR: invalid state transition from:\(state) to:\(newState)") - return false + errorCallback?(.failure(.invalidState)) + return .failure(.invalidState) } } func _startCaching(duration: TimeInterval) { - guard _tryTransitingState(to: .caching) else { return } + guard case .success = _tryTransitingState(to: .caching) else { return } print("start caching") cacheBuffersDuration = duration } @@ -143,43 +152,40 @@ private extension MovieCache { self.movieOutput = movieOutput } - func _startWriting(_ completionCallback:((_ error: Error?) -> Void)? = nil) { - guard _tryTransitingState(to: .writing) else { - completionCallback?(MovieCacheError.invalidState) - return - } + func _startWriting(_ completionCallback: Completion? = nil) { + guard case .success = _tryTransitingState(to: .writing, completionCallback) else { return } guard movieOutput != nil else { print("movie output is not ready yet, waiting...") - completionCallback?(nil) + completionCallback?(.success(true)) return } print("start writing") movieOutput?.startRecording(sync: true) { _, error in - completionCallback?(error) + if let error = error { + completionCallback?(.failure(.movieOutputError(error))) + } else { + completionCallback?(.success(true)) + } } } - func _stopWriting(_ completionCallback:((Error?) -> Void)? 
= nil) { - guard _tryTransitingState(to: .stopped), movieOutput != nil else { - completionCallback?(MovieCacheError.invalidState) - return - } + func _stopWriting(_ completionCallback: Completion? = nil) { + guard case .success = _tryTransitingState(to: .stopped, completionCallback), movieOutput != nil else { return } print("stop writing. videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") movieOutput?.finishRecording(sync: true) { - completionCallback?(nil) + completionCallback?(.success(true)) } movieOutput = nil } - func _cancelWriting(_ completionCallback:(() -> Void)? = nil) { - guard _tryTransitingState(to: .canceled), let movieOutput = movieOutput else { - completionCallback?() + func _cancelWriting(_ completionCallback: Completion? = nil) { + defer { self.movieOutput = nil - return } + guard case .success = _tryTransitingState(to: .canceled, completionCallback), let movieOutput = movieOutput else { return } print("cancel writing") movieOutput.cancelRecording(sync: true) { - completionCallback?() + completionCallback?(.success(true)) } self.movieOutput = nil } @@ -188,7 +194,7 @@ private extension MovieCache { if needsCancel && state == .writing { _cancelWriting() } - guard _tryTransitingState(to: .idle) else { return } + guard case .success = _tryTransitingState(to: .idle) else { return } print("stop caching") _cleanBufferCaches() } From 4f2f33f035d31055a4895d9f679391a2839d79ff Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 16 Jul 2020 17:35:11 +0800 Subject: [PATCH 260/332] improve(MovieOutput): make startRecording reenterable --- framework/Source/iOS/MovieOutput.swift | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 6cccead2..7740829a 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -211,7 +211,10 @@ 
public class MovieOutput: ImageConsumer, AudioEncodingTarget { let block = { [weak self] () -> Void in do { guard let self = self else { return } - guard self.assetWriter.status != .cancelled else { + if self.assetWriter.status == .writing { + completionCallback?(true, nil) + return + } else if self.assetWriter.status == .cancelled { throw MovieOutputError.startWritingError(assetWriterError: nil) } From 4aa290cda962ed7e0363e7452e3b4c5845ea8677 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 21 Jul 2020 15:19:04 +0800 Subject: [PATCH 261/332] chore: change property scope --- framework/Source/iOS/Camera.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 2ae886e9..66f645d7 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -28,7 +28,7 @@ public enum PhysicalCameraLocation { } } - func captureDevicePosition() -> AVCaptureDevice.Position { + public func captureDevicePosition() -> AVCaptureDevice.Position { switch self { case .backFacing: return .back case .frontFacing: return .front From b3aa3db95b6d9b36fc2220f486c0bc6c838e792e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 28 Jul 2020 14:01:08 +0800 Subject: [PATCH 262/332] improve: update MovieCache state transition and update property scope --- framework/Source/iOS/Camera.swift | 2 +- framework/Source/iOS/MovieCache.swift | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 66f645d7..9381df79 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -87,7 +87,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public let targets = TargetContainer() public weak var delegate: CameraDelegate? public let captureSession:AVCaptureSession - public private(set) var inputCamera:AVCaptureDevice! 
+ public var inputCamera:AVCaptureDevice! public private(set) var videoInput:AVCaptureDeviceInput! public let videoOutput:AVCaptureVideoDataOutput! public var microphone:AVCaptureDevice? diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 932a8e4e..4bf7ad8a 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -119,7 +119,7 @@ private extension MovieCache { return .failure(.sameState) } switch (state, newState) { - case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), + case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), (.unknown, .canceled), (.idle, .caching), (.idle, .writing), (.idle, .canceled), (.caching, .writing), (.caching, .stopped), (.caching, .canceled), (.caching, .idle), (.writing, .stopped), (.writing, .canceled), From bd17a488cacd556f0754c32424e7a594a46e9e5e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 4 Aug 2020 17:42:47 +0800 Subject: [PATCH 263/332] fix(MovieCache): fix MovieOutput is not started correctly in some cases --- framework/Source/iOS/MovieCache.swift | 68 +++++++++++++++++++------- framework/Source/iOS/MovieOutput.swift | 1 + 2 files changed, 50 insertions(+), 19 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 4bf7ad8a..e896eb32 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -8,11 +8,24 @@ import Foundation import AVFoundation -public enum MovieCacheError: Error { +public enum MovieCacheError: Error, Equatable, CustomStringConvertible { case invalidState case sameState case emptyMovieOutput case movieOutputError(Error) + + public var description: String { + switch self { + case .invalidState: return "invalidState" + case .sameState: return "sameState" + case .emptyMovieOutput: return "emptyMovieOutput" + case .movieOutputError: return "movieOutputError" + } + } + + public static func == (lhs: 
MovieCacheError, rhs: MovieCacheError) -> Bool { + return lhs.description == rhs.description + } } public class MovieCache: ImageConsumer, AudioEncodingTarget { @@ -33,6 +46,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { case canceled } public private(set) var state = State.unknown + private var writingCallback: Completion? public init() { print("MovieCache init") @@ -111,33 +125,35 @@ private extension MovieCache { return state != .unknown && state != .idle } - func _tryTransitingState(to newState: State, _ errorCallback: Completion? = nil) -> Result { + func _tryTransitingState(to newState: State, _ errorCallback: Completion? = nil) -> MovieCacheError? { if state == newState { // NOTE: for same state, just do nothing and callback print("WARNING: Same state transition for:\(state)") errorCallback?(.success(true)) - return .failure(.sameState) + return .sameState } switch (state, newState) { - case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), (.unknown, .canceled), - (.idle, .caching), (.idle, .writing), (.idle, .canceled), - (.caching, .writing), (.caching, .stopped), (.caching, .canceled), (.caching, .idle), - (.writing, .stopped), (.writing, .canceled), - (.stopped, .idle), (.stopped, .writing), (.stopped, .canceled), - (.canceled, .idle), (.canceled, .writing): + case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), + (.idle, .caching), (.idle, .writing), + (.caching, .writing), (.caching, .stopped), (.caching, .idle), + (.writing, .stopped), + (.stopped, .idle), (.stopped, .writing), + (.canceled, .idle), (.canceled, .writing), + (_, .canceled): // any state can transite to canceled debugPrint("state transite from:\(state) to:\(newState)") state = newState - return .success(true) + return nil default: assertionFailure() print("ERROR: invalid state transition from:\(state) to:\(newState)") errorCallback?(.failure(.invalidState)) - return .failure(.invalidState) + return .invalidState } } func 
_startCaching(duration: TimeInterval) { - guard case .success = _tryTransitingState(to: .caching) else { return } + let error = _tryTransitingState(to: .caching) + guard error == nil else { return } print("start caching") cacheBuffersDuration = duration } @@ -150,16 +166,24 @@ private extension MovieCache { } print("set movie output") self.movieOutput = movieOutput + if state == .writing { + print("it is already writing, start MovieOutput recording immediately") + _startMovieOutput(writingCallback) + } } func _startWriting(_ completionCallback: Completion? = nil) { - guard case .success = _tryTransitingState(to: .writing, completionCallback) else { return } + guard _tryTransitingState(to: .writing) == nil else { return } guard movieOutput != nil else { print("movie output is not ready yet, waiting...") - completionCallback?(.success(true)) + writingCallback = completionCallback return } print("start writing") + _startMovieOutput(completionCallback) + } + + func _startMovieOutput(_ completionCallback: Completion? = nil) { movieOutput?.startRecording(sync: true) { _, error in if let error = error { completionCallback?(.failure(.movieOutputError(error))) @@ -170,31 +194,37 @@ private extension MovieCache { } func _stopWriting(_ completionCallback: Completion? = nil) { - guard case .success = _tryTransitingState(to: .stopped, completionCallback), movieOutput != nil else { return } + guard _tryTransitingState(to: .stopped) == nil else { return } + guard movieOutput != nil else { return } print("stop writing. videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") movieOutput?.finishRecording(sync: true) { completionCallback?(.success(true)) } movieOutput = nil + writingCallback = nil } func _cancelWriting(_ completionCallback: Completion? 
= nil) { defer { - self.movieOutput = nil + movieOutput = nil + writingCallback = nil + } + guard _tryTransitingState(to: .canceled) == nil else { return } + guard let movieOutput = movieOutput else { + completionCallback?(.success(true)) + return } - guard case .success = _tryTransitingState(to: .canceled, completionCallback), let movieOutput = movieOutput else { return } print("cancel writing") movieOutput.cancelRecording(sync: true) { completionCallback?(.success(true)) } - self.movieOutput = nil } func _stopCaching(needsCancel: Bool) { if needsCancel && state == .writing { _cancelWriting() } - guard case .success = _tryTransitingState(to: .idle) else { return } + guard _tryTransitingState(to: .idle) == nil else { return } print("stop caching") _cleanBufferCaches() } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 7740829a..b3810dff 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -56,6 +56,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public weak var delegate: MovieOutputDelegate? public let url: URL + public var writerStatus: AVAssetWriter.Status { assetWriter.status } private let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? From 6a6e2ec4dd61b4963ed8b8396937e2370f0bb965 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 5 Aug 2020 13:49:35 +0800 Subject: [PATCH 264/332] fix(MoviePlayer): fix didPlayToEnd is not accurate when currentItem is nil --- framework/Source/iOS/MoviePlayer.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 4befef1b..c815d9f1 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -65,7 +65,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { return asset.imageOrientation ?? 
.portrait } public var didPlayToEnd: Bool { - return currentItem?.currentTime() ?? .zero >= assetDuration + return currentItem == nil || (currentItem?.currentTime() ?? .zero >= assetDuration) } public var hasTarget: Bool { targets.count > 0 } From 646d89bff526f20130d5eadecc5b4a3a27b55392 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 14 Aug 2020 16:21:22 +0800 Subject: [PATCH 265/332] improve(MoviePlayer): remove currentTime call in log since it might stuck thread sometimes --- framework/Source/iOS/MoviePlayer.swift | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index c815d9f1..76c4c716 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -64,6 +64,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { guard let asset = asset else { return .portrait } return asset.imageOrientation ?? .portrait } + // NOTE: be careful, this property might block your thread since it needs to access currentTime public var didPlayToEnd: Bool { return currentItem == nil || (currentItem?.currentTime() ?? .zero >= assetDuration) } @@ -170,7 +171,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } remove(item) super.insert(item, after: afterItem) - print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count)") + print("[MoviePlayer] insert new item(\(item.duration.seconds)s):\(item) afterItem:\(String(describing: afterItem)) enableVideoOutput:\(enableVideoOutput) itemsAfter:\(items().count)") } didNotifyEndedItem = nil } @@ -211,7 +212,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { super.replaceCurrentItem(with: item) } - print("[MoviePlayer] replace current item with newItem(\(item?.duration.seconds ?? 
0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) currentTime:\(currentTime().seconds) itemsAfter:\(items().count) ") + print("[MoviePlayer] replace current item with newItem(\(item?.duration.seconds ?? 0)s)):\(String(describing: item)) enableVideoOutput:\(enableVideoOutput) itemsAfter:\(items().count) ") } public func replayLastItem() { @@ -222,25 +223,25 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { play() } - print("[MoviePlayer] replay last item:\(playerItem) currentTime:\(currentTime().seconds)") + print("[MoviePlayer] replay last item:\(playerItem)") } override public func remove(_ item: AVPlayerItem) { super.remove(item) pendingNewItems.removeAll { $0 == item } - print("[MoviePlayer] remove item:\(item) currentTime:\(currentTime().seconds)") + print("[MoviePlayer] remove item:\(item)") } override public func removeAllItems() { _stopLoopingIfNeeded() super.removeAllItems() pendingNewItems.removeAll() - print("[MoviePlayer] remove all items currentTime:\(currentTime().seconds)") + print("[MoviePlayer] remove all items") } override public func advanceToNextItem() { super.advanceToNextItem() - print("[MoviePlayer] advance to next item currentTime:\(currentTime().seconds)") + print("[MoviePlayer] advance to next item") } // MARK: - @@ -271,7 +272,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } isPlaying = true isProcessing = false - print("[MoviePlayer] start currentTime:\(currentTime().seconds) duration:\(String(describing: asset?.duration.seconds)) items:\(items())") + print("[MoviePlayer] start duration:\(String(describing: asset?.duration.seconds)) items:\(items())") _setupDisplayLinkIfNeeded() _resetTimeObservers() didNotifyEndedItem = nil @@ -297,19 +298,19 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func resume() { isPlaying = true rate = playrate - print("movie player resume currentTime:\(currentTime().seconds) \(String(describing: asset))") + print("movie player resume 
\(String(describing: asset))") } override public func pause() { isPlaying = false guard rate != 0 else { return } - print("movie player pause currentTime:\(currentTime().seconds) \(String(describing: asset))") + print("movie player pause \(String(describing: asset))") super.pause() } public func stop() { pause() - print("movie player stop currentTime:\(currentTime().seconds) \(String(describing: asset))") + print("movie player stop \(String(describing: asset))") _timeObserversUpdate { [weak self] in self?.timeObserversQueue.removeAll() } From 51dfa010837c56eaeb5d2cddc412a35875059902 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 19 Aug 2020 02:48:24 +0800 Subject: [PATCH 266/332] improve(MovieCache): improve state machine and add property --- framework/Source/iOS/MovieCache.swift | 99 +++++++++++++++++++++----- framework/Source/iOS/MovieOutput.swift | 1 + 2 files changed, 83 insertions(+), 17 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index e896eb32..6a23e107 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -47,20 +47,51 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } public private(set) var state = State.unknown private var writingCallback: Completion? 
+ public var isReadyToWrite: Bool { + guard let movieOutput = movieOutput else { return false } + return movieOutput.writerStatus == .unknown + } public init() { print("MovieCache init") } + deinit { + if movieOutput?.writerStatus == .writing { + print("[WARNING] movieOutput is still writing, cancel it now") + movieOutput?.cancelRecording() + } + } + public func startCaching(duration: TimeInterval) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._startCaching(duration: duration) } } - public func setMovieOutput(_ movieOutput: MovieOutput) { + public func setMovieOutputIfNotReady(url: URL, + size: Size, + fileType:AVFileType = .mov, + liveVideo:Bool = false, + videoSettings:[String:Any]? = nil, + videoNaturalTimeScale:CMTimeScale? = nil, + optimizeForNetworkUse: Bool = false, + disablePixelBufferAttachments: Bool = true, + audioSettings:[String:Any]? = nil, + audioSourceFormatHint:CMFormatDescription? = nil, + _ configure: ((MovieOutput) -> Void)? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?._setMovieOutput(movieOutput) + self?._setMovieOutputIfNotReady(url: url, + size: size, + fileType: fileType, + liveVideo: liveVideo, + videoSettings: videoSettings, + videoNaturalTimeScale: videoNaturalTimeScale, + optimizeForNetworkUse: optimizeForNetworkUse, + disablePixelBufferAttachments: disablePixelBufferAttachments, + audioSettings: audioSettings, + audioSourceFormatHint: audioSourceFormatHint, + configure) } } @@ -137,8 +168,8 @@ private extension MovieCache { (.idle, .caching), (.idle, .writing), (.caching, .writing), (.caching, .stopped), (.caching, .idle), (.writing, .stopped), - (.stopped, .idle), (.stopped, .writing), - (.canceled, .idle), (.canceled, .writing), + (.stopped, .idle), (.stopped, .caching), (.stopped, .writing), + (.canceled, .idle), (.canceled, .caching), (.canceled, .writing), (_, .canceled): // any state can transite to canceled debugPrint("state transite 
from:\(state) to:\(newState)") state = newState @@ -158,17 +189,47 @@ private extension MovieCache { cacheBuffersDuration = duration } - func _setMovieOutput(_ movieOutput: MovieOutput) { - guard state != .writing || self.movieOutput == nil else { - print("Should not set MovieOutput during writing") - assertionFailure("Should not set MovieOutput during writing") + func _setMovieOutputIfNotReady(url: URL, + size: Size, + fileType: AVFileType = .mov, + liveVideo: Bool = false, + videoSettings: [String:Any]? = nil, + videoNaturalTimeScale: CMTimeScale? = nil, + optimizeForNetworkUse: Bool = false, + disablePixelBufferAttachments: Bool = true, + audioSettings: [String:Any]? = nil, + audioSourceFormatHint: CMFormatDescription? = nil, + _ configure: ((MovieOutput) -> Void)? = nil) { + guard !isReadyToWrite else { + print("No need to create MovieOutput") return } - print("set movie output") - self.movieOutput = movieOutput - if state == .writing { - print("it is already writing, start MovieOutput recording immediately") - _startMovieOutput(writingCallback) + if state == .writing, let oldMovieOutput = movieOutput { + _cancelWriting() { _ in + print("Remove canceled video url:\(oldMovieOutput.url)") + try? 
FileManager.default.removeItem(at: oldMovieOutput.url) + } + } + do { + let newMovieOutput = try MovieOutput(URL: url, + size: size, + fileType: fileType, + liveVideo: liveVideo, + videoSettings: videoSettings, + videoNaturalTimeScale: videoNaturalTimeScale, + optimizeForNetworkUse: optimizeForNetworkUse, + disablePixelBufferAttachments: disablePixelBufferAttachments, + audioSettings: audioSettings, + audioSourceFormatHint: audioSourceFormatHint) + self.movieOutput = newMovieOutput + print("set movie output") + configure?(newMovieOutput) + if state == .writing { + print("it is already writing, start MovieOutput recording immediately") + _startMovieOutput(writingCallback) + } + } catch { + print("[ERROR] can't create movie output") } } @@ -195,12 +256,16 @@ private extension MovieCache { func _stopWriting(_ completionCallback: Completion? = nil) { guard _tryTransitingState(to: .stopped) == nil else { return } - guard movieOutput != nil else { return } + guard let movieOutput = movieOutput else { return } print("stop writing. videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") - movieOutput?.finishRecording(sync: true) { - completionCallback?(.success(true)) + movieOutput.finishRecording(sync: true) { + if let error = movieOutput.writerError { + completionCallback?(.failure(.movieOutputError(error))) + } else { + completionCallback?(.success(true)) + } } - movieOutput = nil + self.movieOutput = nil writingCallback = nil } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index b3810dff..9774d143 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -57,6 +57,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let url: URL public var writerStatus: AVAssetWriter.Status { assetWriter.status } + public var writerError: Error? 
{ assetWriter.error } private let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? From d516ab93f9388fd084619428717d28e7e446720e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 26 Aug 2020 10:34:08 +0800 Subject: [PATCH 267/332] improve(MovieCache): add completion callback for stopCaching --- framework/Source/iOS/MovieCache.swift | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 6a23e107..d3300e85 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -113,9 +113,9 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } } - public func stopCaching(needsCancel: Bool = false) { + public func stopCaching(needsCancel: Bool = false, _ completionCallback: Completion? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?._stopCaching(needsCancel: needsCancel) + self?._stopCaching(needsCancel: needsCancel, completionCallback) } } } @@ -285,10 +285,13 @@ private extension MovieCache { } } - func _stopCaching(needsCancel: Bool) { + func _stopCaching(needsCancel: Bool, _ completionCallback: Completion?) 
{ if needsCancel && state == .writing { _cancelWriting() } + defer { + completionCallback?(.success(true)) + } guard _tryTransitingState(to: .idle) == nil else { return } print("stop caching") _cleanBufferCaches() From 2c9cc4793b83cd537c2d0bf69ec1a76717a77a49 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 26 Aug 2020 21:58:48 +0800 Subject: [PATCH 268/332] fix(MovieOutput): fix possible crash if movie output was canceled --- framework/Source/iOS/MovieOutput.swift | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 9774d143..47302c7d 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -402,6 +402,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { pixelBuffer = nil pixelBufferPoolSemaphore.wait() + guard assetWriterPixelBufferInput.pixelBufferPool != nil else { + print("WARNING: PixelBufferInput pool is nil") + return false + } let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) pixelBufferPoolSemaphore.signal() guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { From 9a0e10630b936f34f171536260407982a5d90656 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 31 Aug 2020 23:15:28 +0800 Subject: [PATCH 269/332] improve(MovieCache): improve stopWriting callback and state machine --- framework/Source/iOS/MovieCache.swift | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index d3300e85..4f374d82 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -101,7 +101,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } } - public func stopWriting(_ completionCallback: Completion? = nil) { + public func stopWriting(_ completionCallback: ((URL?, MovieCacheError?) 
-> Void)? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._stopWriting(completionCallback) } @@ -169,7 +169,7 @@ private extension MovieCache { (.caching, .writing), (.caching, .stopped), (.caching, .idle), (.writing, .stopped), (.stopped, .idle), (.stopped, .caching), (.stopped, .writing), - (.canceled, .idle), (.canceled, .caching), (.canceled, .writing), + (.canceled, .idle), (.canceled, .caching), (.canceled, .writing), (.canceled, .stopped), (_, .canceled): // any state can transite to canceled debugPrint("state transite from:\(state) to:\(newState)") state = newState @@ -204,7 +204,8 @@ private extension MovieCache { print("No need to create MovieOutput") return } - if state == .writing, let oldMovieOutput = movieOutput { + if let oldMovieOutput = movieOutput, movieOutput?.writerStatus == .writing { + assertionFailure("MovieOutput is still writing, should not set MovieOutput. state:\(state)") _cancelWriting() { _ in print("Remove canceled video url:\(oldMovieOutput.url)") try? FileManager.default.removeItem(at: oldMovieOutput.url) @@ -254,15 +255,21 @@ private extension MovieCache { } } - func _stopWriting(_ completionCallback: Completion? = nil) { - guard _tryTransitingState(to: .stopped) == nil else { return } - guard let movieOutput = movieOutput else { return } + func _stopWriting(_ completionCallback: ((URL?, MovieCacheError?) -> Void)? = nil) { + guard _tryTransitingState(to: .stopped) == nil else { + completionCallback?(movieOutput?.url, .invalidState) + return + } + guard let movieOutput = movieOutput else { + completionCallback?(self.movieOutput?.url, .emptyMovieOutput) + return + } print("stop writing. 
videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") movieOutput.finishRecording(sync: true) { if let error = movieOutput.writerError { - completionCallback?(.failure(.movieOutputError(error))) + completionCallback?(movieOutput.url, .movieOutputError(error)) } else { - completionCallback?(.success(true)) + completionCallback?(movieOutput.url, nil) } } self.movieOutput = nil From be1910b5f54053c1222094fe46e8f14254b0b5c4 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 31 Aug 2020 23:17:23 +0800 Subject: [PATCH 270/332] improve(camera): improve deviceType setting logic together with location to reduce flashing, and use actual value from AVCaptureDevice in getter --- framework/Source/iOS/Camera.swift | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 9381df79..981b7324 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -63,7 +63,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var location:PhysicalCameraLocation { didSet { if oldValue == location { return } - configureDeviceInput() + configureDeviceInput(location: location, deviceType: deviceType) } } public var runBenchmark:Bool = false @@ -94,11 +94,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioInput:AVCaptureDeviceInput? public var audioOutput:AVCaptureAudioDataOutput? 
public var dontDropFrames: Bool = false - public var deviceType = AVCaptureDevice.DeviceType.builtInWideAngleCamera { - didSet { - guard oldValue.rawValue != deviceType.rawValue else { return } - configureDeviceInput() - } + public var deviceType: AVCaptureDevice.DeviceType { + return inputCamera.deviceType } public var backCameraStableMode: AVCaptureVideoStabilizationMode = .standard { didSet { @@ -140,7 +137,6 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.captureSession = AVCaptureSession() self.captureSession.beginConfiguration() captureSession.sessionPreset = sessionPreset - self.deviceType = deviceType if let cameraDevice = cameraDevice { self.inputCamera = cameraDevice @@ -248,7 +244,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) } - func configureDeviceInput() { + public func configureDeviceInput(location: PhysicalCameraLocation, deviceType: AVCaptureDevice.DeviceType) { guard let device = location.device(deviceType) else { fatalError("ERROR: Can't find video devices for \(location)") } @@ -262,6 +258,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer inputCamera = device captureSession.addInput(newVideoInput) videoInput = newVideoInput + self.location = location configureStabilization() } else { print("Can't add video input") From ef2ccabb4d34c5584f4f16fa6476b6c68f44e629 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sat, 5 Sep 2020 00:50:20 +0800 Subject: [PATCH 271/332] improve: expose MovieOutput.startFrameTime and change MovieCache stop writing callback to MovieOutput --- framework/Source/iOS/MovieCache.swift | 12 ++++++------ framework/Source/iOS/MovieOutput.swift | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 4f374d82..e3f2b336 100644 --- 
a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -101,7 +101,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } } - public func stopWriting(_ completionCallback: ((URL?, MovieCacheError?) -> Void)? = nil) { + public func stopWriting(_ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._stopWriting(completionCallback) } @@ -255,21 +255,21 @@ private extension MovieCache { } } - func _stopWriting(_ completionCallback: ((URL?, MovieCacheError?) -> Void)? = nil) { + func _stopWriting(_ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { guard _tryTransitingState(to: .stopped) == nil else { - completionCallback?(movieOutput?.url, .invalidState) + completionCallback?(movieOutput, .invalidState) return } guard let movieOutput = movieOutput else { - completionCallback?(self.movieOutput?.url, .emptyMovieOutput) + completionCallback?(self.movieOutput, .emptyMovieOutput) return } print("stop writing. 
videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") movieOutput.finishRecording(sync: true) { if let error = movieOutput.writerError { - completionCallback?(movieOutput.url, .movieOutputError(error)) + completionCallback?(movieOutput, .movieOutputError(error)) } else { - completionCallback?(movieOutput.url, nil) + completionCallback?(movieOutput, nil) } } self.movieOutput = nil diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 47302c7d..5977e7c1 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -67,7 +67,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var videoEncodingIsFinished = false var audioEncodingIsFinished = false var markIsFinishedAfterProcessing = false - private var startFrameTime: CMTime? + public private(set) var startFrameTime: CMTime? public private(set) var recordedDuration: CMTime? private var previousFrameTime: CMTime? 
var encodingLiveVideo:Bool { From c9afe4d967ca1ed46ed60c094ce0051d845af6bc Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 14 Sep 2020 00:04:48 +0800 Subject: [PATCH 272/332] improve(MovieCache): state machine for edge case --- framework/Source/iOS/MovieCache.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index e3f2b336..5a64fe60 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -164,7 +164,7 @@ private extension MovieCache { return .sameState } switch (state, newState) { - case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), + case (.unknown, .idle), (.unknown, .caching), (.unknown, .writing), (.unknown, .stopped), (.idle, .caching), (.idle, .writing), (.caching, .writing), (.caching, .stopped), (.caching, .idle), (.writing, .stopped), From c26a4988d59587ddac6cf668cac488215a3c1334 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 14 Sep 2020 00:05:14 +0800 Subject: [PATCH 273/332] improve(transcoding): reduce memory usage by wrapping auto release pool for reading video buffers --- framework/Source/iOS/MovieInput.swift | 41 ++++++++++++++------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index efa2d699..5e334aa8 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -280,32 +280,33 @@ public class MovieInput: ImageSource { while(assetReader.status == .reading) { if(thread.isCancelled) { break } - - if let movieOutput = self.synchronizedMovieOutput { - self.conditionLock.lock() - if(self.readingShouldWait) { - self.synchronizedEncodingDebugPrint("Disable reading") - self.conditionLock.wait() - self.synchronizedEncodingDebugPrint("Enable reading") + autoreleasepool { + if let movieOutput = self.synchronizedMovieOutput { + self.conditionLock.lock() + 
if(self.readingShouldWait) { + self.synchronizedEncodingDebugPrint("Disable reading") + self.conditionLock.wait() + self.synchronizedEncodingDebugPrint("Enable reading") + } + self.conditionLock.unlock() + + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData) { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + } + if(movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false) { + if let readerAudioTrackOutput = readerAudioTrackOutput { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) + } + } } - self.conditionLock.unlock() - - if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData) { + else { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) - } - if(movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false) { - if let readerAudioTrackOutput = readerAudioTrackOutput { + if let readerAudioTrackOutput = readerAudioTrackOutput, + self.audioEncodingTarget?.readyForNextAudioBuffer() ?? true { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } } } - else { - self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) - if let readerAudioTrackOutput = readerAudioTrackOutput, - self.audioEncodingTarget?.readyForNextAudioBuffer() ?? 
true { - self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) - } - } } assetReader.cancelReading() From 804f826064e6d79fe231ae45c55f2e3be44333d2 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 14 Sep 2020 00:06:05 +0800 Subject: [PATCH 274/332] improve(MovieOutput): add videoID and improve logging --- framework/Source/iOS/MovieOutput.swift | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 5977e7c1..f6cd16cd 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -56,6 +56,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public weak var delegate: MovieOutputDelegate? public let url: URL + public var videoID: String? public var writerStatus: AVAssetWriter.Status { assetWriter.status } public var writerError: Error? { assetWriter.error } private let assetWriter:AVAssetWriter @@ -539,14 +540,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // we don't want to risk letting framebuffers pile up in between poll intervals. 
usleep(100000) // 0.1 seconds } - let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - synchronizedEncodingDebugPrint("appending video sample buffer at:\(time.seconds)") + synchronizedEncodingDebugPrint("appending video sample buffer at:\(frameTime.seconds)") guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") return false } if !assetWriterVideoInput.isReadyForMoreMediaData { - print("WARNING: video input is not ready at time: \(time))") + print("WARNING: video input is not ready at time: \(frameTime))") return false } if let ciFilter = ciFilter { @@ -559,15 +559,15 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { do { var appendResult = false try NSObject.catchException { - appendResult = bufferInput.append(buffer, withPresentationTime: time) + appendResult = bufferInput.append(buffer, withPresentationTime: frameTime) } if (!appendResult) { - print("WARNING: Trouble appending pixel buffer at time: \(time) \(String(describing: assetWriter.error))") + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: assetWriter.error))") return false } totalVideoFramesAppended += 1 } catch { - print("WARNING: Trouble appending video sample buffer at time: \(time) \(error)") + print("WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") return false } return true From 99aa0c33038c9435d948c25df7fd51ca228200af Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 15 Sep 2020 15:04:54 +0800 Subject: [PATCH 275/332] improve(MovieCache): use videoID to identify which MovieOutput should be affected --- framework/Source/iOS/MovieCache.swift | 64 ++++++++++++++++++--------- 1 file changed, 42 insertions(+), 22 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 5a64fe60..84e9fae0 100644 --- a/framework/Source/iOS/MovieCache.swift +++ 
b/framework/Source/iOS/MovieCache.swift @@ -12,6 +12,7 @@ public enum MovieCacheError: Error, Equatable, CustomStringConvertible { case invalidState case sameState case emptyMovieOutput + case unmatchedVideoID case movieOutputError(Error) public var description: String { @@ -20,6 +21,7 @@ public enum MovieCacheError: Error, Equatable, CustomStringConvertible { case .sameState: return "sameState" case .emptyMovieOutput: return "emptyMovieOutput" case .movieOutputError: return "movieOutputError" + case .unmatchedVideoID: return "unmatchedVideoID" } } @@ -29,7 +31,7 @@ public enum MovieCacheError: Error, Equatable, CustomStringConvertible { } public class MovieCache: ImageConsumer, AudioEncodingTarget { - public typealias Completion = (Result) -> Void + public typealias Completion = (Result) -> Void public let sources = SourceContainer() public let maximumInputs: UInt = 1 public private(set) var movieOutput: MovieOutput? @@ -51,6 +53,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { guard let movieOutput = movieOutput else { return false } return movieOutput.writerStatus == .unknown } + private var startingVideoID: String? public init() { print("MovieCache init") @@ -95,21 +98,21 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } } - public func startWriting(_ completionCallback: Completion? = nil) { + public func startWriting(videoID: String?, _ completionCallback: Completion? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?._startWriting(completionCallback) + self?._startWriting(videoID: videoID, completionCallback) } } - public func stopWriting(_ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { + public func stopWriting(videoID: String?, _ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? 
= nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?._stopWriting(completionCallback) + self?._stopWriting(videoID: videoID, completionCallback) } } - public func cancelWriting(_ completionCallback: Completion? = nil) { + public func cancelWriting(videoID: String?, _ completionCallback: Completion? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in - self?._cancelWriting(completionCallback) + self?._cancelWriting(videoID: videoID, completionCallback) } } @@ -160,7 +163,7 @@ private extension MovieCache { if state == newState { // NOTE: for same state, just do nothing and callback print("WARNING: Same state transition for:\(state)") - errorCallback?(.success(true)) + errorCallback?(.success(movieOutput)) return .sameState } switch (state, newState) { @@ -206,7 +209,7 @@ private extension MovieCache { } if let oldMovieOutput = movieOutput, movieOutput?.writerStatus == .writing { assertionFailure("MovieOutput is still writing, should not set MovieOutput. state:\(state)") - _cancelWriting() { _ in + _cancelWriting(videoID: nil) { _ in print("Remove canceled video url:\(oldMovieOutput.url)") try? FileManager.default.removeItem(at: oldMovieOutput.url) } @@ -227,41 +230,52 @@ private extension MovieCache { configure?(newMovieOutput) if state == .writing { print("it is already writing, start MovieOutput recording immediately") - _startMovieOutput(writingCallback) + _startMovieOutput(videoID: startingVideoID, writingCallback) + startingVideoID = nil } } catch { print("[ERROR] can't create movie output") } } - func _startWriting(_ completionCallback: Completion? = nil) { + func _startWriting(videoID: String?, _ completionCallback: Completion? 
= nil) { guard _tryTransitingState(to: .writing) == nil else { return } guard movieOutput != nil else { print("movie output is not ready yet, waiting...") writingCallback = completionCallback + startingVideoID = videoID return } print("start writing") - _startMovieOutput(completionCallback) + _startMovieOutput(videoID: videoID, completionCallback) } - func _startMovieOutput(_ completionCallback: Completion? = nil) { - movieOutput?.startRecording(sync: true) { _, error in + func _startMovieOutput(videoID: String?, _ completionCallback: Completion? = nil) { + guard let movieOutput = movieOutput else { + completionCallback?(.failure(.emptyMovieOutput)) + return + } + movieOutput.videoID = videoID + movieOutput.startRecording(sync: true) { _, error in if let error = error { completionCallback?(.failure(.movieOutputError(error))) } else { - completionCallback?(.success(true)) + completionCallback?(.success(movieOutput)) } } } - func _stopWriting(_ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { + func _stopWriting(videoID: String?, _ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { + guard videoID == movieOutput?.videoID else { + completionCallback?(movieOutput, .unmatchedVideoID) + return + } guard _tryTransitingState(to: .stopped) == nil else { completionCallback?(movieOutput, .invalidState) return } guard let movieOutput = movieOutput else { - completionCallback?(self.movieOutput, .emptyMovieOutput) + completionCallback?(nil, .emptyMovieOutput) return } print("stop writing. videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") @@ -276,28 +290,34 @@ private extension MovieCache { writingCallback = nil } - func _cancelWriting(_ completionCallback: Completion? = nil) { + func _cancelWriting(videoID: String?, _ completionCallback: Completion? 
= nil) { + guard videoID == movieOutput?.videoID else { + completionCallback?(.failure(.unmatchedVideoID)) + return + } defer { movieOutput = nil writingCallback = nil } guard _tryTransitingState(to: .canceled) == nil else { return } guard let movieOutput = movieOutput else { - completionCallback?(.success(true)) + completionCallback?(.success(self.movieOutput)) return } print("cancel writing") movieOutput.cancelRecording(sync: true) { - completionCallback?(.success(true)) + completionCallback?(.success(movieOutput)) } } func _stopCaching(needsCancel: Bool, _ completionCallback: Completion?) { + let movieOutput = self.movieOutput if needsCancel && state == .writing { - _cancelWriting() + _cancelWriting(videoID: nil) + startingVideoID = nil } defer { - completionCallback?(.success(true)) + completionCallback?(.success(movieOutput)) } guard _tryTransitingState(to: .idle) == nil else { return } print("stop caching") From 7f7e21ab9ed576467687422a727cb2d0cab46968 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 15 Sep 2020 15:21:12 +0800 Subject: [PATCH 276/332] chore(MovieCache): improve logging --- framework/Source/iOS/MovieCache.swift | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 84e9fae0..3e1165ab 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -229,7 +229,7 @@ private extension MovieCache { print("set movie output") configure?(newMovieOutput) if state == .writing { - print("it is already writing, start MovieOutput recording immediately") + print("it is already writing, start MovieOutput recording immediately, videoID:\(String(describing: startingVideoID))") _startMovieOutput(videoID: startingVideoID, writingCallback) startingVideoID = nil } @@ -246,7 +246,7 @@ private extension MovieCache { startingVideoID = videoID return } - print("start writing") + print("start writing, videoID:\(String(describing: 
videoID))") _startMovieOutput(videoID: videoID, completionCallback) } @@ -267,6 +267,7 @@ private extension MovieCache { func _stopWriting(videoID: String?, _ completionCallback: ((MovieOutput?, MovieCacheError?) -> Void)? = nil) { guard videoID == movieOutput?.videoID else { + print("stopWriting failed. Unmatched videoID:\(String(describing: videoID)) movieOutput?.videoID:\(String(describing: movieOutput?.videoID))") completionCallback?(movieOutput, .unmatchedVideoID) return } @@ -278,7 +279,7 @@ private extension MovieCache { completionCallback?(nil, .emptyMovieOutput) return } - print("stop writing. videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") + print("stop writing. videoID:\(String(describing: videoID)) videoFramebuffers:\(framebufferCache.count) audioSampleBuffers:\(audioSampleBufferCache.count) videoSampleBuffers:\(videoSampleBufferCache.count)") movieOutput.finishRecording(sync: true) { if let error = movieOutput.writerError { completionCallback?(movieOutput, .movieOutputError(error)) @@ -292,6 +293,7 @@ private extension MovieCache { func _cancelWriting(videoID: String?, _ completionCallback: Completion? = nil) { guard videoID == movieOutput?.videoID else { + print("cancelWriting failed. 
Unmatched videoID:\(String(describing: videoID)) movieOutput?.videoID:\(String(describing: movieOutput?.videoID))") completionCallback?(.failure(.unmatchedVideoID)) return } @@ -304,7 +306,7 @@ private extension MovieCache { completionCallback?(.success(self.movieOutput)) return } - print("cancel writing") + print("cancel writing, videoID:\(String(describing: videoID))") movieOutput.cancelRecording(sync: true) { completionCallback?(.success(movieOutput)) } From 1b2d99ca46fda1bfb280f9bafe1cc1b395b5206a Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 16 Sep 2020 17:30:26 +0800 Subject: [PATCH 277/332] fix: fix compile error on simulator --- framework/Source/iOS/Camera.swift | 2 ++ 1 file changed, 2 insertions(+) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 981b7324..5d11d872 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -172,12 +172,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureAsYUV { supportsFullYUVRange = false + #if !targetEnvironment(simulator) let supportedPixelFormats = videoOutput.availableVideoPixelFormatTypes for currentPixelFormat in supportedPixelFormats { if currentPixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange { supportsFullYUVRange = true } } + #endif if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} From edcd1c3168bcb156904c236a4d809bc4be872e23 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 21 Sep 2020 23:56:13 +0800 Subject: [PATCH 278/332] tweak(Camera): disable P3 color by default since it cannot be supported in OpenGL ES 2.0 now --- framework/Source/iOS/Camera.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 5d11d872..9903d7a5 100755 
--- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -153,6 +153,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } + captureSession.automaticallyConfiguresCaptureDeviceForWideColor = false do { self.videoInput = try AVCaptureDeviceInput(device:inputCamera) } catch { From 88fe2a75e3550f861551fdb075a61ade45801a37 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 22 Sep 2020 23:36:40 +0800 Subject: [PATCH 279/332] fix(RenderView): fix possible stuck when release in main thread --- framework/Source/iOS/RenderView.swift | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index e58c25a9..83315e87 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -81,8 +81,17 @@ public class RenderView:UIView, ImageConsumer { } deinit { - sharedImageProcessingContext.runOperationSynchronously{ - destroyDisplayFramebuffer() + let strongDisplayFramebuffer = displayFramebuffer + let strongDisplayRenderbuffer = displayRenderbuffer + sharedImageProcessingContext.runOperationAsynchronously { + if let displayFramebuffer = strongDisplayFramebuffer { + var temporaryFramebuffer = displayFramebuffer + glDeleteFramebuffers(1, &temporaryFramebuffer) + } + if let displayRenderbuffer = strongDisplayRenderbuffer { + var temporaryRenderbuffer = displayRenderbuffer + glDeleteRenderbuffers(1, &temporaryRenderbuffer) + } } } From 3aad160e0c5bff8bb96631aeed682a6e844640c7 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 12 Oct 2020 17:04:22 +0800 Subject: [PATCH 280/332] chore: add debug log for frame dropping --- framework/Source/iOS/Camera.swift | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 9903d7a5..a2979e72 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -417,6 
+417,10 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } } + + public func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + debugPrint("dropped a video frame from camera") + } public func startCapture() { self.numberOfFramesCaptured = 0 From b5d0302d9e9ccdba40d3d43b9ce1a92260ec89b6 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 14 Oct 2020 16:22:29 +0800 Subject: [PATCH 281/332] fix(MovieOutput): fix encoding failed with -16364 error when appending duplicated frames --- framework/Source/iOS/MovieOutput.swift | 35 +++++++++++++++++--------- 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index f6cd16cd..d8153185 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -99,6 +99,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public private(set) var cacheBuffersDuration: TimeInterval = 0 public let disablePixelBufferAttachments: Bool private var pixelBufferPoolSemaphore = DispatchSemaphore(value: 1) + private var writtenSampleTimes = Set() var synchronizedEncodingDebug = false public private(set) var totalVideoFramesAppended = 0 @@ -386,12 +387,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return false } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard frameTime != previousFrameTime else { - print("WARNING: frameTime is as same as previousFrameTIme") - return true - } - if previousFrameTime == nil { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
assetWriter.startSession(atSourceTime: frameTime) @@ -429,6 +424,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { videoEncodingIsFinished = true } } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard !_checkSampleTimeDuplicated(frameTime) else { return true } + let bufferInput = assetWriterPixelBufferInput var appendResult = false synchronizedEncodingDebugPrint("appending video framebuffer at:\(frameTime.seconds)") @@ -453,6 +452,20 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } + func _checkSampleTimeDuplicated(_ sampleTime: CMTime) -> Bool { + let sampleTimeInSeconds = sampleTime.seconds + if writtenSampleTimes.contains(sampleTimeInSeconds) { + print("WARNING: sampleTime:\(sampleTime) is duplicated, dropped!") + return true + } + // Avoid too large collection + if writtenSampleTimes.count > 100 { + writtenSampleTimes.removeAll() + } + writtenSampleTimes.insert(sampleTimeInSeconds) + return false + } + func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { // Is this the first pixel buffer we have recieved? // NOTE: this will cause strange frame brightness blinking for the first few seconds, be careful about using this. @@ -513,12 +526,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard (frameTime != previousFrameTime) else { - print("Cannot get timestamp from framebuffer, dropping frame") - return false - } - if previousFrameTime == nil { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
let startFrameTime = frameTime @@ -549,6 +556,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { print("WARNING: video input is not ready at time: \(frameTime))") return false } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard !_checkSampleTimeDuplicated(frameTime) else { return true } + if let ciFilter = ciFilter { let originalImage = CIImage(cvPixelBuffer: buffer) if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { From b7703b238e1227aeadd4a45a4f10cd1436a09bc1 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 19 Oct 2020 18:42:52 +0800 Subject: [PATCH 282/332] fix(MovieCache): should not cancel recording if there is already a movie recording --- framework/Source/iOS/MovieCache.swift | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 3e1165ab..ff05aca1 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -207,12 +207,9 @@ private extension MovieCache { print("No need to create MovieOutput") return } - if let oldMovieOutput = movieOutput, movieOutput?.writerStatus == .writing { - assertionFailure("MovieOutput is still writing, should not set MovieOutput. state:\(state)") - _cancelWriting(videoID: nil) { _ in - print("Remove canceled video url:\(oldMovieOutput.url)") - try? FileManager.default.removeItem(at: oldMovieOutput.url) - } + if let currentMovieOutput = movieOutput, movieOutput?.writerStatus == .writing { + print("MovieOutput is still writing, skip set MovieOutput. 
state:\(state) currentURL:\(currentMovieOutput.url) newURL:\(url)") + return } do { let newMovieOutput = try MovieOutput(URL: url, From 9d7576f62f1a028ba6c4b8b19b52237545fed6cc Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 27 Oct 2020 14:22:36 +0800 Subject: [PATCH 283/332] improve: use CACurrentMediaTime for better time percision --- framework/Source/Operations/HarrisCornerDetector.swift | 4 ++-- framework/Source/iOS/Camera.swift | 10 +++++----- framework/Source/iOS/MovieInput.swift | 2 +- framework/Source/iOS/MoviePlayer.swift | 4 ++-- framework/Source/iOS/PictureInput.swift | 2 -- 5 files changed, 10 insertions(+), 12 deletions(-) diff --git a/framework/Source/Operations/HarrisCornerDetector.swift b/framework/Source/Operations/HarrisCornerDetector.swift index abb031cf..4ebdea8a 100644 --- a/framework/Source/Operations/HarrisCornerDetector.swift +++ b/framework/Source/Operations/HarrisCornerDetector.swift @@ -60,7 +60,7 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) // var rawImagePixels = [UInt8](count:imageByteSize, repeatedValue:0) -// let startTime = CFAbsoluteTimeGetCurrent() +// let startTime = CACurrentMediaTime() let rawImagePixels = UnsafeMutablePointer.allocate(capacity:imageByteSize) // -Onone, [UInt8] array: 30 ms for 720p frame on Retina iMac @@ -90,7 +90,7 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { rawImagePixels.deallocate() -// print("Harris extraction frame time: \(CFAbsoluteTimeGetCurrent() - startTime)") +// print("Harris extraction frame time: \(CACurrentMediaTime() - startTime)") return corners } diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index a2979e72..506084ff 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -123,7 +123,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var 
numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 var framesSinceLastCheck = 0 - var lastCheckTime = CFAbsoluteTimeGetCurrent() + var lastCheckTime = CACurrentMediaTime() var captureSessionRestartAttempts = 0 @@ -324,7 +324,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.frameRenderingSemaphore.signal() } } - let startTime = CFAbsoluteTimeGetCurrent() + let startTime = CACurrentMediaTime() guard let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer) else { print("Warning: cannot get imageBuffer") return @@ -399,7 +399,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if self.runBenchmark { self.numberOfFramesCaptured += 1 if (self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore) { - let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) + let currentFrameTime = (CACurrentMediaTime() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured - initialBenchmarkFramesToIgnore)) ms") print("Current frame time : \(1000.0 * currentFrameTime) ms") @@ -407,8 +407,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } if self.logFPS { - if ((CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0) { - self.lastCheckTime = CFAbsoluteTimeGetCurrent() + if ((CACurrentMediaTime() - self.lastCheckTime) > 1.0) { + self.lastCheckTime = CACurrentMediaTime() print("FPS: \(self.framesSinceLastCheck)") self.framesSinceLastCheck = 0 } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 5e334aa8..342df1f4 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -483,7 +483,7 @@ public class MovieInput: ImageSource { } if self.runBenchmark { - let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) + let currentFrameTime = 
(CACurrentMediaTime() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.totalFramesSent)) ms") print("Current frame time : \(1000.0 * currentFrameTime) ms") diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 76c4c716..13d1e5a2 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -570,13 +570,13 @@ private extension MoviePlayer { delegate?.moviePlayerDidReadPixelBuffer(pixelBuffer, time: timeForDisplay) - let startTime = CFAbsoluteTimeGetCurrent() + let startTime = CACurrentMediaTime() if runBenchmark || logEnabled { totalFramesSent += 1 } defer { if runBenchmark { - let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) + let currentFrameTime = (CACurrentMediaTime() - startTime) totalFrameTime += currentFrameTime print("[MoviePlayer] Average frame time :\(1000.0 * totalFrameTime / Double(totalFramesSent)) ms") print("[MoviePlayer] Current frame time :\(1000.0 * currentFrameTime) ms") diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index f84ea06a..9954e4fb 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -102,8 +102,6 @@ public class PictureInput: ImageSource { } try sharedImageProcessingContext.runOperationSynchronously{ - // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); - if (shouldRedrawUsingCoreGraphics) { // For resized or incompatible image: redraw imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) From a78fb268af5df4e538549d0104781424a55944eb Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 1 Nov 2020 15:08:04 +0800 Subject: [PATCH 284/332] improve: extract ResizeCrop frame calculation logic and support directly crop on Camera output Framebuffer --- 
framework/Source/CameraConversion.swift | 25 +++++- framework/Source/Operations/ResizeCrop.swift | 92 +++++++++++--------- framework/Source/Pipeline.swift | 14 +++ framework/Source/iOS/Camera.swift | 8 +- 4 files changed, 93 insertions(+), 46 deletions(-) diff --git a/framework/Source/CameraConversion.swift b/framework/Source/CameraConversion.swift index 3d120540..93a5859f 100644 --- a/framework/Source/CameraConversion.swift +++ b/framework/Source/CameraConversion.swift @@ -21,12 +21,29 @@ public let colorConversionMatrix709Default = Matrix3x3(rowMajorValues:[ 1.793, -0.533, 0.0, ]) -public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuffer, chrominanceFramebuffer:Framebuffer, secondChrominanceFramebuffer:Framebuffer? = nil, resultFramebuffer:Framebuffer, colorConversionMatrix:Matrix3x3) { +public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuffer, chrominanceFramebuffer:Framebuffer, secondChrominanceFramebuffer:Framebuffer? = nil, resizeOutput: ResizeOutputInfo? = nil, resultFramebuffer:Framebuffer, colorConversionMatrix:Matrix3x3) { let textureProperties:[InputTextureProperties] - if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { - textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), secondChrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)] + let luminanceTextureProperties: InputTextureProperties + let chrominanceTextureProperties: InputTextureProperties + var secondChrominanceTextureProperties: InputTextureProperties? 
+ if let resizeOutput = resizeOutput { + luminanceTextureProperties = InputTextureProperties(textureCoordinates:luminanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin:resizeOutput.normalizedOffsetFromOrigin, cropSize:resizeOutput.normalizedCropSize), texture:luminanceFramebuffer.texture) + chrominanceTextureProperties = InputTextureProperties(textureCoordinates:chrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin:resizeOutput.normalizedOffsetFromOrigin, cropSize:resizeOutput.normalizedCropSize), texture:chrominanceFramebuffer.texture) + if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { + secondChrominanceTextureProperties = InputTextureProperties(textureCoordinates:secondChrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin:resizeOutput.normalizedOffsetFromOrigin, cropSize:resizeOutput.normalizedCropSize), texture:secondChrominanceFramebuffer.texture) + } } else { - textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)] + luminanceTextureProperties = luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) + chrominanceTextureProperties = chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) + if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { + secondChrominanceTextureProperties = secondChrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) + } + } + + if let secondChrominanceFramebuffer = secondChrominanceFramebuffer, let secondChrominanceTextureProperties = secondChrominanceTextureProperties { + textureProperties = 
[luminanceTextureProperties, chrominanceTextureProperties, secondChrominanceTextureProperties] + } else { + textureProperties = [luminanceTextureProperties, chrominanceTextureProperties] } resultFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift index 404d80f4..c43bc89a 100644 --- a/framework/Source/Operations/ResizeCrop.swift +++ b/framework/Source/Operations/ResizeCrop.swift @@ -1,3 +1,48 @@ +public struct ResizeOutputInfo { + let finalCropSize: Size + let normalizedCropSize: Size + let normalizedOffsetFromOrigin: Position +} + +public func calculateResizeOutput(inputSize: Size, outputSize: Size?, scaleOutputSizeToFill: Bool) -> ResizeOutputInfo { + let finalCropSize: Size + let normalizedCropSize: Size + let normalizedOffsetFromOrigin: Position + + if let outputSize = outputSize { + let glCropSize: Size + + if scaleOutputSizeToFill { + // finalCropSize won't be resized + let ratioW = outputSize.width / inputSize.width + let ratioH = outputSize.height / inputSize.height + if ratioW > ratioH { + glCropSize = Size(width: inputSize.width, height: inputSize.width * (outputSize.height / outputSize.width)) + } else { + glCropSize = Size(width: inputSize.height * (outputSize.width / outputSize.height), height: inputSize.height) + } + } else { + // finalCropSize might be resized + glCropSize = outputSize + } + + finalCropSize = Size(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + + // Scale finalCropSize to inputSize to crop original content + let aspectFitRatioToOrigin = min(inputSize.width / finalCropSize.width, inputSize.height / finalCropSize.height) + let cropSizeInOrigin = Size(width: finalCropSize.width * aspectFitRatioToOrigin, height: finalCropSize.height * aspectFitRatioToOrigin) + normalizedCropSize = Size(width: cropSizeInOrigin.width / inputSize.width, height: 
cropSizeInOrigin.height / inputSize.height) + normalizedOffsetFromOrigin = Position((inputSize.width - cropSizeInOrigin.width) / 2 / inputSize.width, + (inputSize.height - cropSizeInOrigin.height) / 2 / inputSize.height) + } else { + finalCropSize = inputSize + normalizedOffsetFromOrigin = Position.zero + normalizedCropSize = Size(width: 1, height: 1) + } + + return ResizeOutputInfo(finalCropSize: finalCropSize, normalizedCropSize: normalizedCropSize, normalizedOffsetFromOrigin: normalizedOffsetFromOrigin) +} + open class ResizeCrop: BasicOperation { public var useCropSizeAsFinal = false public var cropSizeInPixels: Size? @@ -11,57 +56,24 @@ open class ResizeCrop: BasicOperation { let inputGLSize = inputFramebuffer.sizeForTargetOrientation(.portrait) let inputSize = Size(inputGLSize) - let (normalizedOffsetFromOrigin, finalCropSize, normalizedCropSize) = calculateFinalFrame(inputSize: inputSize) + let resizeOutputInfo = calculateResizeOutput(inputSize: inputSize, outputSize: cropSizeInPixels, scaleOutputSizeToFill: !useCropSizeAsFinal) renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties( orientation: .portrait, - size: GLSize(finalCropSize), + size: GLSize(resizeOutputInfo.finalCropSize), stencil: false) - let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) + let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:resizeOutputInfo.normalizedOffsetFromOrigin, cropSize:resizeOutputInfo.normalizedCropSize), texture:inputFramebuffer.texture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) renderQuadWithShader(shader, 
uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) releaseIncomingFramebuffers() } +} - public func calculateFinalFrame(inputSize: Size) -> (Position, Size, Size) { - let finalCropSize: Size - let normalizedCropSize: Size - let normalizedOffsetFromOrigin: Position - - if let cropSize = cropSizeInPixels { - let glCropSize: Size - - if useCropSizeAsFinal { - // finalCropSize might be resized - glCropSize = cropSize - } else { - // finalCropSize won't be resized - let ratioW = cropSize.width / inputSize.width - let ratioH = cropSize.height / inputSize.height - if ratioW > ratioH { - glCropSize = Size(width: inputSize.width, height: inputSize.width * (cropSize.height / cropSize.width)) - } else { - glCropSize = Size(width: inputSize.height * (cropSize.width / cropSize.height), height: inputSize.height) - } - } - - finalCropSize = Size(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) - - // Scale finalCropSize to inputSize to crop original content - let aspectFitRatioToOrigin = min(inputSize.width / finalCropSize.width, inputSize.height / finalCropSize.height) - let cropSizeInOrigin = Size(width: finalCropSize.width * aspectFitRatioToOrigin, height: finalCropSize.height * aspectFitRatioToOrigin) - normalizedCropSize = Size(width: cropSizeInOrigin.width / inputSize.width, height: cropSizeInOrigin.height / inputSize.height) - normalizedOffsetFromOrigin = Position((inputSize.width / 2 - cropSizeInOrigin.width / 2) / inputSize.width, - (inputSize.height / 2 - cropSizeInOrigin.height / 2) / inputSize.height) - } else { - finalCropSize = inputSize - normalizedOffsetFromOrigin = Position.zero - normalizedCropSize = Size(width: 1, height: 1) - } - - return (normalizedOffsetFromOrigin, finalCropSize, normalizedCropSize) +extension GLSize { + var gpuSize: Size { + return Size(width: Float(width), height: Float(height)) } } diff --git 
a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 70af36a2..b3f8e945 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -134,6 +134,20 @@ public extension ImageConsumer { } sources.sources.removeAll() } + + func flushWithTinyBuffer(in context: OpenGLContext = sharedImageProcessingContext) { + context.runOperationSynchronously { + do { + for index in 0.. Date: Sun, 1 Nov 2020 15:08:32 +0800 Subject: [PATCH 285/332] chore: change framebuffer cache warning count to 10 --- framework/Source/FramebufferCache.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 807c6a94..b3707d9d 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -27,7 +27,7 @@ public class FramebufferCache { let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer - if(framebufferCache.count > 20) { + if(framebufferCache.count > 10) { print("Warning: Runaway framebuffer cache with size: \(framebufferCache.count)") } From ccd52744999e1ad9dacdb879c622d60ffe071fdb Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 13 Nov 2020 14:16:33 +0800 Subject: [PATCH 286/332] improve(PictureInput): better image orientation processing --- framework/Source/ImageOrientation.swift | 17 +++++++++++++++++ framework/Source/iOS/PictureInput.swift | 8 ++++---- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index 7371c0d1..cac3016c 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -40,3 +40,20 @@ public enum Rotation { } } } + +public extension 
UIImage.Orientation { + var gpuOrientation: ImageOrientation { + switch self { + case .up, .upMirrored: + return .portrait + case .down, .downMirrored: + return .portraitUpsideDown + case .left, .leftMirrored: + return .landscapeLeft + case .right, .rightMirrored: + return .landscapeRight + @unknown default: + return .portrait + } + } +} diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 9954e4fb..51b0cb68 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -141,13 +141,13 @@ public class PictureInput: ImageSource { } - public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { - try self.init(image:image.cgImage!, imageName:"UIImage", smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { + try self.init(image:image.cgImage!, imageName:"UIImage", smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) throws { + public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { guard let image = UIImage(named:imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } - try self.init(image:image.cgImage!, imageName:imageName, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + try self.init(image:image.cgImage!, imageName:imageName, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? 
image.imageOrientation.gpuOrientation) } deinit { From b236841846c1ad0306eee4a7348abe6f282aaa5f Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 17 Nov 2020 18:53:31 +0800 Subject: [PATCH 287/332] improve(FrameBufferCache): allow more type of framebuffer to unlock after renderFrame --- framework/Source/BasicOperation.swift | 2 ++ framework/Source/Framebuffer.swift | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 0a5db657..86eb832d 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -143,6 +143,8 @@ open class BasicOperation: ImageProcessingOperation { renderFramebuffer.timingStyle = .videoFrame(timestamp:timestamp) } + framebuffer.unlock() + } else if framebuffer.cache != nil { framebuffer.unlock() } else { remainingFramebuffers[key] = framebuffer diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index b0d6a213..dc717005 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -169,7 +169,7 @@ public class Framebuffer: Hashable { // MARK: - // MARK: Framebuffer cache - weak var cache:FramebufferCache? + public weak var cache:FramebufferCache? 
var framebufferRetainCount = 0 public func lock() { framebufferRetainCount += 1 From b3970e074df7ca1ef88ceaeecfbedcbe06392fff Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 17 Nov 2020 18:55:31 +0800 Subject: [PATCH 288/332] improve(PictureInput): support resize when input image from disk --- framework/Source/ImageOrientation.swift | 14 +++++++ framework/Source/iOS/PictureInput.swift | 49 +++++++++++++++++++++++-- 2 files changed, 59 insertions(+), 4 deletions(-) diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index cac3016c..08e8c240 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -56,4 +56,18 @@ public extension UIImage.Orientation { return .portrait } } + + var cgImageOrientation: CGImagePropertyOrientation { + switch self { + case .up: return .up + case .down: return .down + case .left: return .left + case .right: return .right + case .upMirrored: return .upMirrored + case .downMirrored: return .downMirrored + case .leftMirrored: return .leftMirrored + case .rightMirrored: return .rightMirrored + @unknown default: return .up + } + } } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 51b0cb68..e0dede21 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -141,13 +141,43 @@ public class PictureInput: ImageSource { } - public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { - try self.init(image:image.cgImage!, imageName:"UIImage", smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + public convenience init(image:UIImage, size: CGSize? = nil, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { + try self.init(image:image, name:"UIImage", size: size, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? 
image.imageOrientation.gpuOrientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { + public convenience init(imageName:String, size: CGSize? = nil, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { guard let image = UIImage(named:imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } - try self.init(image:image.cgImage!, imageName:imageName, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + try self.init(image:image, name: imageName, size: size, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + } + + public convenience init(image: UIImage, name: String, size: CGSize? = nil, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation + var cgImage: CGImage = image.cgImage! + if let targetSize = size { + autoreleasepool { + // Get CIImage with orientation + guard var newImage = CIImage(image: image, options: + [.applyOrientationProperty: true, + .properties: [kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue]]) else { + return + } + + // Scale + let ratioW = targetSize.width / image.size.width + let ratioH = targetSize.height / image.size.height + let fillRatio = max(ratioW, ratioH) + let scaleTransform = CGAffineTransform(scaleX: fillRatio, y: fillRatio) + newImage = newImage.transformed(by: scaleTransform) + + // Crop and generate imag + let cropRect = CGRect(x: (newImage.extent.size.width - targetSize.width) / 2, y: (newImage.extent.size.height - targetSize.height)/2, width: targetSize.width, height: targetSize.height) + + let context = CIContext(options: nil) + cgImage = context.createCGImage(newImage, from: cropRect)! + targetOrientation = orientation ?? 
.portrait + } + } + try self.init(image: cgImage, imageName: name, smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) } deinit { @@ -186,3 +216,14 @@ public class PictureInput: ImageSource { }*/ } } + +public extension CGSize { + func rotatedByOrientation(_ imageOrientation: ImageOrientation) -> CGSize { + switch imageOrientation { + case .portrait, .portraitUpsideDown: + return self + case .landscapeLeft, .landscapeRight: + return CGSize(width: height, height: width) + } + } +} From 9c141cf753ec643cd2b3c5d2abee4d7f6302fb9a Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Tue, 17 Nov 2020 16:57:31 +0800 Subject: [PATCH 289/332] fix(MovieOutput): fix empty buffer in audio track. --- framework/Source/iOS/MovieCache.swift | 4 + framework/Source/iOS/MovieOutput.swift | 392 ++++++++++++++++--------- 2 files changed, 251 insertions(+), 145 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index ff05aca1..20a72db2 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -73,6 +73,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { } public func setMovieOutputIfNotReady(url: URL, + fps: Double, size: Size, fileType:AVFileType = .mov, liveVideo:Bool = false, @@ -85,6 +86,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { _ configure: ((MovieOutput) -> Void)? 
= nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._setMovieOutputIfNotReady(url: url, + fps: fps, size: size, fileType: fileType, liveVideo: liveVideo, @@ -193,6 +195,7 @@ private extension MovieCache { } func _setMovieOutputIfNotReady(url: URL, + fps: Double, size: Size, fileType: AVFileType = .mov, liveVideo: Bool = false, @@ -213,6 +216,7 @@ private extension MovieCache { } do { let newMovieOutput = try MovieOutput(URL: url, + fps: fps, size: size, fileType: fileType, liveVideo: liveVideo, diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index d8153185..a4c085b2 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -56,6 +56,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public weak var delegate: MovieOutputDelegate? public let url: URL + public let fps: Double public var videoID: String? public var writerStatus: AVAssetWriter.Status { assetWriter.status } public var writerError: Error? { assetWriter.error } @@ -65,12 +66,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor public let size: Size private let colorSwizzlingShader:ShaderProgram + public var isTranscode = false var videoEncodingIsFinished = false var audioEncodingIsFinished = false var markIsFinishedAfterProcessing = false + private var hasVideoBuffer = false + private var hasAuidoBuffer = false public private(set) var startFrameTime: CMTime? public private(set) var recordedDuration: CMTime? - private var previousFrameTime: CMTime? + private var previousVideoStartTime: CMTime? + private var previousAudioStartTime: CMTime? + private var previousVideoEndTime: CMTime? + private var previousAudioEndTime: CMTime? 
+ var encodingLiveVideo:Bool { didSet { assetWriterVideoInput.expectsMediaDataInRealTime = encodingLiveVideo @@ -96,6 +104,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return context! }() public private(set) var audioSampleBufferCache = [CMSampleBuffer]() + public private(set) var videoSampleBufferCache = [CMSampleBuffer]() + public private(set) var frameBufferCache = [Framebuffer]() public private(set) var cacheBuffersDuration: TimeInterval = 0 public let disablePixelBufferAttachments: Bool private var pixelBufferPoolSemaphore = DispatchSemaphore(value: 1) @@ -116,10 +126,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var preferredTransform: CGAffineTransform? private var isProcessing = false - public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { + public init(URL:Foundation.URL, fps: Double, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { print("movie output init \(URL)") self.url = URL + self.fps = fps if sharedImageProcessingContext.supportsTextureCaches() { self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader @@ -292,16 +303,22 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriterAudioInput?.markAsFinished() self.assetWriterVideoInput.markAsFinished() - if let lastFrame = self.previousFrameTime { - // Resolve black frames at the end. 
Without this the end timestamp of the session's samples could be either video or audio. - // Documentation: "You do not need to call this method; if you call finishWriting without - // calling this method, the session's effective end time will be the latest end timestamp of - // the session's samples (that is, no samples will be edited out at the end)." - print("MovieOutput start endSession") - self.assetWriter.endSession(atSourceTime: lastFrame) - } - if let lastFrame = self.previousFrameTime, let startFrame = self.startFrameTime { + var lastFrameTime: CMTime? + if let lastVideoFrame = self.previousVideoStartTime { + if self.isTranscode { + print("MovieOutput start endSession") + lastFrameTime = lastVideoFrame + self.assetWriter.endSession(atSourceTime: lastVideoFrame) + } else if let lastAudioTime = self.previousAudioEndTime, let lastVideoTime = self.previousVideoEndTime { + let endTime = min(lastAudioTime, lastVideoTime) + lastFrameTime = endTime + print("MovieOutput start endSession, last audio end time is:\(lastAudioTime.seconds), last video end time is:\(lastVideoTime.seconds), end time is:\(endTime.seconds)") + self.assetWriter.endSession(atSourceTime: endTime) + } + } + + if let lastFrame = lastFrameTime, let startFrame = self.startFrameTime { self.recordedDuration = lastFrame - startFrame } print("MovieOutput did start finishing writing. 
Total frames appended video::\(self.totalVideoFramesAppended) audio:\(self.totalAudioFramesAppended)") @@ -351,8 +368,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glFinish() - if previousFrameTime == nil { - debugPrint("starting process new framebuffer when previousFrameTime == nil") + if previousVideoStartTime == nil { + debugPrint("MovieOutput starting process new framebuffer when previousFrameTime == nil") } let work = { [weak self] in @@ -377,85 +394,105 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { func _processFramebuffer(_ framebuffer: Framebuffer) -> Bool { guard assetWriter.status == .writing, !videoEncodingIsFinished else { - print("Guard fell through, dropping video frame. writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + print("MovieOutput Guard fell through, dropping video frame. writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") return false } - do { - // Ignore still images and other non-video updates (do I still need this?) - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { - print("Cannot get timestamp from framebuffer, dropping frame") - return false - } - - if previousFrameTime == nil { - // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
- assetWriter.startSession(atSourceTime: frameTime) - startFrameTime = frameTime - print("did start writing at:\(frameTime.seconds)") - delegate?.movieOutputDidStartWriting(self, at: frameTime) - } - previousFrameTime = frameTime - - pixelBuffer = nil - pixelBufferPoolSemaphore.wait() - guard assetWriterPixelBufferInput.pixelBufferPool != nil else { - print("WARNING: PixelBufferInput pool is nil") - return false - } - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) - pixelBufferPoolSemaphore.signal() - guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { - print("WARNING: Unable to create pixel buffer, dropping frame") - return false - } - try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) - guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { - print("WARNING: Had to drop a frame at time \(frameTime)") - return false - } - while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { - synchronizedEncodingDebugPrint("Video waiting...") - // Better to poll isReadyForMoreMediaData often since when it does become true - // we don't want to risk letting framebuffers pile up in between poll intervals. - usleep(100000) // 0.1 seconds - if markIsFinishedAfterProcessing { - synchronizedEncodingDebugPrint("set videoEncodingIsFinished to true after processing") - markIsFinishedAfterProcessing = false - videoEncodingIsFinished = true + + framebuffer.lock() + frameBufferCache.append(framebuffer) + hasVideoBuffer = true + + guard _canStartWritingVideo() else { + return true + } + + if !isTranscode && startFrameTime == nil { + _decideStartTime() + } + + var processedBufferCount = 0 + for framebuffer in frameBufferCache { + defer { framebuffer.unlock() } + do { + // Ignore still images and other non-video updates (do I still need this?) 
+ guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { + print("MovieOutput Cannot get timestamp from framebuffer, dropping frame") + continue } + + if previousVideoStartTime == nil && isTranscode { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + assetWriter.startSession(atSourceTime: frameTime) + startFrameTime = frameTime + print("MovieOutput did start writing at:\(frameTime.seconds)") + delegate?.movieOutputDidStartWriting(self, at: frameTime) + } + previousVideoStartTime = frameTime + + pixelBuffer = nil + pixelBufferPoolSemaphore.wait() + guard assetWriterPixelBufferInput.pixelBufferPool != nil else { + print("MovieOutput WARNING: PixelBufferInput pool is nil") + continue + } + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) + pixelBufferPoolSemaphore.signal() + guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { + print("MovieOutput WARNING: Unable to create pixel buffer, dropping frame") + continue + } + try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) + guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { + print("MovieOutput WARNING: Had to drop a frame at time \(frameTime)") + continue + } + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + synchronizedEncodingDebugPrint("MovieOutput Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. 
+ usleep(100000) // 0.1 seconds + if markIsFinishedAfterProcessing { + synchronizedEncodingDebugPrint("MovieOutput set videoEncodingIsFinished to true after processing") + markIsFinishedAfterProcessing = false + videoEncodingIsFinished = true + } + } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard !_checkSampleTimeDuplicated(frameTime) else { + processedBufferCount += 1 + continue + } + + let bufferInput = assetWriterPixelBufferInput + var appendResult = false + synchronizedEncodingDebugPrint("MovieOutput appending video framebuffer at:\(frameTime.seconds)") + // NOTE: when NSException was triggered within NSObject.catchException, the object inside the block seems cannot be released correctly, so be careful not to trigger error, or directly use "self." + try NSObject.catchException { + appendResult = bufferInput.append(self.pixelBuffer!, withPresentationTime: frameTime) + } + if !appendResult { + print("MovieOutput WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + continue + } + totalVideoFramesAppended += 1 + processedBufferCount += 1 + previousVideoEndTime = frameTime + _videoFrameDuration() + if videoEncodingIsFinished { + assetWriterVideoInput.markAsFinished() + } + } catch { + print("MovieOutput WARNING: Trouble appending pixel buffer \(error)") } - - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. - guard !_checkSampleTimeDuplicated(frameTime) else { return true } - - let bufferInput = assetWriterPixelBufferInput - var appendResult = false - synchronizedEncodingDebugPrint("appending video framebuffer at:\(frameTime.seconds)") - // NOTE: when NSException was triggered within NSObject.catchException, the object inside the block seems cannot be released correctly, so be careful not to trigger error, or directly use "self." 
- try NSObject.catchException { - appendResult = bufferInput.append(self.pixelBuffer!, withPresentationTime: frameTime) - } - if !appendResult { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") - return false - } - totalVideoFramesAppended += 1 - - if videoEncodingIsFinished { - assetWriterVideoInput.markAsFinished() - } - - return true - } catch { - print("WARNING: Trouble appending pixel buffer \(error)") - return false } + frameBufferCache.removeFirst(processedBufferCount) + return true } func _checkSampleTimeDuplicated(_ sampleTime: CMTime) -> Bool { let sampleTimeInSeconds = sampleTime.seconds if writtenSampleTimes.contains(sampleTimeInSeconds) { - print("WARNING: sampleTime:\(sampleTime) is duplicated, dropped!") + print("MovieOutput WARNING: sampleTime:\(sampleTime) is duplicated, dropped!") return true } // Avoid too large collection @@ -479,7 +516,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var cachedTextureRef:CVOpenGLESTexture? = nil let ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) if ret != kCVReturnSuccess { - print("ret error: \(ret), pixelBuffer: \(pixelBuffer)") + print("MovieOutput ret error: \(ret), pixelBuffer: \(pixelBuffer)") return } let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) @@ -520,67 +557,87 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } guard assetWriter.status == .writing, !videoEncodingIsFinished else { - print("Guard fell through, dropping video frame. writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") + print("MovieOutput Guard fell through, dropping video frame. 
writer.state:\(self.assetWriter.status.rawValue) videoEncodingIsFinished:\(self.videoEncodingIsFinished)") return false } - let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - - if previousFrameTime == nil { - // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. - let startFrameTime = frameTime - assetWriter.startSession(atSourceTime: startFrameTime) - self.startFrameTime = startFrameTime - delegate?.movieOutputDidStartWriting(self, at: startFrameTime) - } + hasVideoBuffer = true + videoSampleBufferCache.append(sampleBuffer) - previousFrameTime = frameTime - - guard (assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { - print("Had to drop a frame at time \(frameTime)") - return false + guard _canStartWritingVideo() else { + print("MovieOutput Audio not started yet") + return true } - while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { - self.synchronizedEncodingDebugPrint("Video waiting...") - // Better to poll isReadyForMoreMediaData often since when it does become true - // we don't want to risk letting framebuffers pile up in between poll intervals. - usleep(100000) // 0.1 seconds - } - synchronizedEncodingDebugPrint("appending video sample buffer at:\(frameTime.seconds)") - guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - print("WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") - return false - } - if !assetWriterVideoInput.isReadyForMoreMediaData { - print("WARNING: video input is not ready at time: \(frameTime))") - return false + if !isTranscode && startFrameTime == nil { + _decideStartTime() } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. 
- guard !_checkSampleTimeDuplicated(frameTime) else { return true } - - if let ciFilter = ciFilter { - let originalImage = CIImage(cvPixelBuffer: buffer) - if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { - ciContext.render(outputImage, to: buffer) + var processedBufferCount = 0 + for sampleBuffer in videoSampleBufferCache { + let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + if previousVideoStartTime == nil && isTranscode { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + assetWriter.startSession(atSourceTime: frameTime) + startFrameTime = frameTime + print("MovieOutput did start writing at:\(frameTime.seconds)") + delegate?.movieOutputDidStartWriting(self, at: frameTime) } - } - let bufferInput = assetWriterPixelBufferInput - do { - var appendResult = false - try NSObject.catchException { - appendResult = bufferInput.append(buffer, withPresentationTime: frameTime) + + previousVideoStartTime = frameTime + + guard (assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { + print("MovieOutput Had to drop a frame at time \(frameTime)") + continue } - if (!appendResult) { - print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: assetWriter.error))") - return false + + while !assetWriterVideoInput.isReadyForMoreMediaData && shouldWaitForEncoding && !videoEncodingIsFinished { + self.synchronizedEncodingDebugPrint("MovieOutput Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. 
+ usleep(100000) // 0.1 seconds + } + synchronizedEncodingDebugPrint("MovieOutput appending video sample buffer at:\(frameTime.seconds)") + guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + print("MovieOutput WARNING: Cannot get pixel buffer from sampleBuffer:\(sampleBuffer)") + continue + } + if !assetWriterVideoInput.isReadyForMoreMediaData { + print("MovieOutput WARNING: video input is not ready at time: \(frameTime))") + continue + } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard !_checkSampleTimeDuplicated(frameTime) else { + processedBufferCount += 1 + continue + } + + if let ciFilter = ciFilter { + let originalImage = CIImage(cvPixelBuffer: buffer) + if let outputImage = ciFilter.applyFilter(on: originalImage), let ciContext = cpuCIContext { + ciContext.render(outputImage, to: buffer) + } + } + let bufferInput = assetWriterPixelBufferInput + do { + var appendResult = false + try NSObject.catchException { + appendResult = bufferInput.append(buffer, withPresentationTime: frameTime) + } + if (!appendResult) { + print("MovieOutput WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: assetWriter.error))") + continue + } + totalVideoFramesAppended += 1 + processedBufferCount += 1 + previousVideoEndTime = frameTime + _videoFrameDuration() + } catch { + print("MovieOutput WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") } - totalVideoFramesAppended += 1 - } catch { - print("WARNING: Trouble appending video sample buffer at time: \(frameTime) \(error)") - return false } + videoSampleBufferCache.removeFirst(processedBufferCount) return true } @@ -613,31 +670,38 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { func _processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) -> Bool { guard assetWriter.status == .writing, !audioEncodingIsFinished, let 
audioInput = assetWriterAudioInput else { - print("Guard fell through, dropping audio sample, writer.state:\(assetWriter.status.rawValue) audioEncodingIsFinished:\(audioEncodingIsFinished)") + print("MovieOutput Guard fell through, dropping audio sample, writer.state:\(assetWriter.status.rawValue) audioEncodingIsFinished:\(audioEncodingIsFinished)") return false } - + // Always accept audio buffer and cache it at first, since video frame might delay a bit + hasAuidoBuffer = true audioSampleBufferCache.append(sampleBuffer) - guard previousFrameTime != nil else { - print("Process audio sample but first video frame is not ready yet. Time:\(CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer).seconds)") + + guard _canStartWritingAuido() else { + print("MovieOutput Process audio sample but first video frame is not ready yet. Time:\(CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer).seconds)") return true } + if startFrameTime == nil && !isTranscode { + _decideStartTime() + } + var processedBufferCount = 0 for audioBuffer in audioSampleBufferCache { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer) + previousAudioStartTime = currentSampleTime guard audioInput.isReadyForMoreMediaData || shouldWaitForEncoding else { - print("Had to delay a audio sample at time \(currentSampleTime)") - break + print("MovieOutput Had to delay a audio sample at time \(currentSampleTime)") + continue } while !audioInput.isReadyForMoreMediaData && shouldWaitForEncoding && !audioEncodingIsFinished { - print("Audio waiting...") + print("MovieOutput Audio waiting...") usleep(100000) if !audioInput.isReadyForMoreMediaData { - synchronizedEncodingDebugPrint("Audio still not ready, skip this runloop...") - break + synchronizedEncodingDebugPrint("MovieOutput Audio still not ready, skip this runloop...") + continue } } @@ -648,9 +712,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { appendResult = audioInput.append(audioBuffer) } if 
!appendResult { - print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") - break + print("MovieOutput WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + continue } + previousAudioEndTime = currentSampleTime + CMSampleBufferGetDuration(sampleBuffer) totalAudioFramesAppended += 1 if shouldInvalidateSampleWhenDone { CMSampleBufferInvalidate(audioBuffer) @@ -658,14 +723,51 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { processedBufferCount += 1 } catch { - print("WARNING: Trouble appending audio sample buffer: \(error)") - break + print("MovieOutput WARNING: Trouble appending audio sample buffer: \(error)") + continue } } audioSampleBufferCache.removeFirst(processedBufferCount) return true } + func _videoFrameDuration() -> CMTime { + CMTime(seconds: 1 / fps, preferredTimescale: CMTimeScale(NSEC_PER_SEC)) + } + + func _canStartWritingVideo() -> Bool { + isTranscode || (!isTranscode && hasAuidoBuffer && hasVideoBuffer) + } + + func _canStartWritingAuido() -> Bool { + (isTranscode && previousVideoStartTime != nil) || (!isTranscode && hasAuidoBuffer && hasVideoBuffer) + } + + func _decideStartTime() { + guard let audioBuffer = audioSampleBufferCache.first else { + print("MovieOutput ERROR: empty audio buffer cache, cannot start session") + return + } + let videoTime: CMTime? = { + if let videoBuffer = videoSampleBufferCache.first { + return CMSampleBufferGetOutputPresentationTimeStamp(videoBuffer) + } else if let frameBuffer = frameBufferCache.first { + return frameBuffer.timingStyle.timestamp?.asCMTime + } else { + return nil + } + }() + guard videoTime != nil else { + print("MovieOutput ERROR: empty video time, cannot start session") + return + } + let audioTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer) + let startFrameTime = max(audioTime, videoTime!) 
+ assetWriter.startSession(atSourceTime: startFrameTime) + self.startFrameTime = startFrameTime + delegate?.movieOutputDidStartWriting(self, at: startFrameTime) + } + public func flushPendingAudioBuffers(shouldInvalidateSampleWhenDone: Bool) { guard let lastBuffer = audioSampleBufferCache.popLast() else { return } _ = _processAudioSampleBuffer(lastBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) From b408435b94fd6269f6f6863e37a4824e48e86c32 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 18 Nov 2020 15:59:34 +0800 Subject: [PATCH 290/332] improve(PictureInput): better orientation logic --- framework/Source/iOS/PictureInput.swift | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index e0dede21..1f4f7006 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -141,16 +141,16 @@ public class PictureInput: ImageSource { } - public convenience init(image:UIImage, size: CGSize? = nil, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { - try self.init(image:image, name:"UIImage", size: size, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { + try self.init(image:image.cgImage!, imageName:"UIImage", smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) } - public convenience init(imageName:String, size: CGSize? = nil, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { + public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? 
= nil) throws { guard let image = UIImage(named:imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } - try self.init(image:image, name: imageName, size: size, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + try self.init(image:image.cgImage!, imageName: imageName, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) } - public convenience init(image: UIImage, name: String, size: CGSize? = nil, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var cgImage: CGImage = image.cgImage! if let targetSize = size { @@ -169,7 +169,7 @@ public class PictureInput: ImageSource { let scaleTransform = CGAffineTransform(scaleX: fillRatio, y: fillRatio) newImage = newImage.transformed(by: scaleTransform) - // Crop and generate imag + // Crop and generate image let cropRect = CGRect(x: (newImage.extent.size.width - targetSize.width) / 2, y: (newImage.extent.size.height - targetSize.height)/2, width: targetSize.width, height: targetSize.height) let context = CIContext(options: nil) @@ -177,7 +177,7 @@ public class PictureInput: ImageSource { targetOrientation = orientation ?? 
.portrait } } - try self.init(image: cgImage, imageName: name, smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) + try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) } deinit { From 3b9e0608b6fcd4d909afc0c9a5195363d28ff6a2 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 18 Nov 2020 18:23:23 +0800 Subject: [PATCH 291/332] fix(MovieOutput): should not align AV when no audio is recorded --- framework/Source/iOS/MovieCache.swift | 4 ++++ framework/Source/iOS/MovieOutput.swift | 23 ++++++++++++----------- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 20a72db2..3ec2851e 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -75,6 +75,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { public func setMovieOutputIfNotReady(url: URL, fps: Double, size: Size, + needAlignAV: Bool, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, @@ -88,6 +89,7 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { self?._setMovieOutputIfNotReady(url: url, fps: fps, size: size, + needAlignAV: needAlignAV, fileType: fileType, liveVideo: liveVideo, videoSettings: videoSettings, @@ -197,6 +199,7 @@ private extension MovieCache { func _setMovieOutputIfNotReady(url: URL, fps: Double, size: Size, + needAlignAV: Bool, fileType: AVFileType = .mov, liveVideo: Bool = false, videoSettings: [String:Any]? 
= nil, @@ -218,6 +221,7 @@ private extension MovieCache { let newMovieOutput = try MovieOutput(URL: url, fps: fps, size: size, + needAlignAV: needAlignAV, fileType: fileType, liveVideo: liveVideo, videoSettings: videoSettings, diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index a4c085b2..b25da7d3 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -66,7 +66,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor public let size: Size private let colorSwizzlingShader:ShaderProgram - public var isTranscode = false + public let needAlignAV: Bool var videoEncodingIsFinished = false var audioEncodingIsFinished = false var markIsFinishedAfterProcessing = false @@ -126,11 +126,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var preferredTransform: CGAffineTransform? private var isProcessing = false - public init(URL:Foundation.URL, fps: Double, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { + public init(URL:Foundation.URL, fps: Double, size:Size, needAlignAV: Bool = true, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? 
= nil) throws { print("movie output init \(URL)") self.url = URL self.fps = fps + self.needAlignAV = needAlignAV && (audioSettings != nil || audioSourceFormatHint != nil) if sharedImageProcessingContext.supportsTextureCaches() { self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader @@ -155,7 +156,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? size.width localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? size.height - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecType.h264.rawValue assetWriterVideoInput = AVAssetWriterInput(mediaType:.video, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo @@ -306,7 +307,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var lastFrameTime: CMTime? if let lastVideoFrame = self.previousVideoStartTime { - if self.isTranscode { + if !self.needAlignAV { print("MovieOutput start endSession") lastFrameTime = lastVideoFrame self.assetWriter.endSession(atSourceTime: lastVideoFrame) @@ -406,7 +407,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return true } - if !isTranscode && startFrameTime == nil { + if needAlignAV && startFrameTime == nil { _decideStartTime() } @@ -420,7 +421,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { continue } - if previousVideoStartTime == nil && isTranscode { + if previousVideoStartTime == nil && !needAlignAV { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. 
assetWriter.startSession(atSourceTime: frameTime) startFrameTime = frameTime @@ -569,7 +570,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return true } - if !isTranscode && startFrameTime == nil { + if needAlignAV && startFrameTime == nil { _decideStartTime() } @@ -577,7 +578,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { for sampleBuffer in videoSampleBufferCache { let frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - if previousVideoStartTime == nil && isTranscode { + if previousVideoStartTime == nil && !needAlignAV { // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. assetWriter.startSession(atSourceTime: frameTime) startFrameTime = frameTime @@ -683,7 +684,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return true } - if startFrameTime == nil && !isTranscode { + if startFrameTime == nil && needAlignAV { _decideStartTime() } @@ -736,11 +737,11 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } func _canStartWritingVideo() -> Bool { - isTranscode || (!isTranscode && hasAuidoBuffer && hasVideoBuffer) + !needAlignAV || (needAlignAV && hasAuidoBuffer && hasVideoBuffer) } func _canStartWritingAuido() -> Bool { - (isTranscode && previousVideoStartTime != nil) || (!isTranscode && hasAuidoBuffer && hasVideoBuffer) + (!needAlignAV && previousVideoStartTime != nil) || (needAlignAV && hasAuidoBuffer && hasVideoBuffer) } func _decideStartTime() { From 8db116a213e47578d84a172b3807547aca09d38e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 19 Nov 2020 18:42:43 +0800 Subject: [PATCH 292/332] fix(MovieOutput): semaphore didn't signal when pixel pool was released --- framework/Source/iOS/MovieOutput.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index b25da7d3..256cf949 100644 --- a/framework/Source/iOS/MovieOutput.swift 
+++ b/framework/Source/iOS/MovieOutput.swift @@ -432,12 +432,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { pixelBuffer = nil pixelBufferPoolSemaphore.wait() + defer { + pixelBufferPoolSemaphore.signal() + } guard assetWriterPixelBufferInput.pixelBufferPool != nil else { print("MovieOutput WARNING: PixelBufferInput pool is nil") continue } let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) - pixelBufferPoolSemaphore.signal() guard pixelBuffer != nil && pixelBufferStatus == kCVReturnSuccess else { print("MovieOutput WARNING: Unable to create pixel buffer, dropping frame") continue From 54ea85ca4b91bf8bb51649c7dd30d528c7be4db3 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Mon, 23 Nov 2020 12:12:42 +0800 Subject: [PATCH 293/332] fix(orientation): fix unsupported video orientation --- framework/Source/iOS/MoviePlayer.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 13d1e5a2..d73bf314 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -683,7 +683,7 @@ public extension AVAsset { let trackTransform = videoTrack.preferredTransform switch (trackTransform.a, trackTransform.b, trackTransform.c, trackTransform.d) { case (1, 0, 0, 1): return .portrait - case (1, 0, 0, -1): return .portraitUpsideDown + case (1, 0, 0, -1), (-1, 0, 0, -1): return .portraitUpsideDown case (0, 1, -1, 0): return .landscapeLeft case (0, -1, 1, 0): return .landscapeRight default: @@ -700,7 +700,7 @@ public extension AVAsset { let trackTransform = videoTrack.preferredTransform switch (trackTransform.a, trackTransform.b, trackTransform.c, trackTransform.d) { case (1, 0, 0, 1): return .portrait - case (1, 0, 0, -1): return .portraitUpsideDown + case (1, 0, 0, -1), (-1, 0, 0, -1): return .portraitUpsideDown case (0, 1, -1, 0): return 
.landscapeRight case (0, -1, 1, 0): return .landscapeLeft default: From 0f1557dfe6662e2a183ea3ededa9b12861e0a396 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 2 Dec 2020 18:40:21 +0800 Subject: [PATCH 294/332] fix(camera): fix preview frame size is wrong on iOS 12 --- framework/Source/Operations/ResizeCrop.swift | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift index c43bc89a..a89dd051 100644 --- a/framework/Source/Operations/ResizeCrop.swift +++ b/framework/Source/Operations/ResizeCrop.swift @@ -10,23 +10,19 @@ public func calculateResizeOutput(inputSize: Size, outputSize: Size?, scaleOutpu let normalizedOffsetFromOrigin: Position if let outputSize = outputSize { - let glCropSize: Size - if scaleOutputSizeToFill { // finalCropSize won't be resized let ratioW = outputSize.width / inputSize.width let ratioH = outputSize.height / inputSize.height if ratioW > ratioH { - glCropSize = Size(width: inputSize.width, height: inputSize.width * (outputSize.height / outputSize.width)) + finalCropSize = Size(width: inputSize.width, height: inputSize.width * (outputSize.height / outputSize.width)) } else { - glCropSize = Size(width: inputSize.height * (outputSize.width / outputSize.height), height: inputSize.height) + finalCropSize = Size(width: inputSize.height * (outputSize.width / outputSize.height), height: inputSize.height) } } else { // finalCropSize might be resized - glCropSize = outputSize + finalCropSize = outputSize } - - finalCropSize = Size(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) // Scale finalCropSize to inputSize to crop original content let aspectFitRatioToOrigin = min(inputSize.width / finalCropSize.width, inputSize.height / finalCropSize.height) From d69e3219d0897ce285bc1acd5a1fdba20e8cf294 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 3 Dec 2020 14:30:29 +0800 Subject: [PATCH 
295/332] improve(camera): limit max size of camera output buffer --- framework/Source/Operations/ResizeCrop.swift | 22 ++++++++++++++++++++ framework/Source/iOS/Camera.swift | 4 ++-- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift index a89dd051..d5e8bb2b 100644 --- a/framework/Source/Operations/ResizeCrop.swift +++ b/framework/Source/Operations/ResizeCrop.swift @@ -1,9 +1,31 @@ public struct ResizeOutputInfo { + // size in pixel let finalCropSize: Size + // normalized size within [0, 1] of inputSize let normalizedCropSize: Size + // normalized offset to [0, 1] of inputSize let normalizedOffsetFromOrigin: Position } +public func limitedSizeAndRatio(of inputSize: Size, to maxSize: Size) -> ResizeOutputInfo { + // Aspect fit maxSize to inputSize to get normalized size and offset + let aspectFitRatio = min(inputSize.width / maxSize.width, inputSize.height / maxSize.height) + let cropSizeInInput = Size(width: maxSize.width * aspectFitRatio, height: maxSize.height * aspectFitRatio) + let normalizedCropSize = Size(width: cropSizeInInput.width / inputSize.width, height: cropSizeInInput.height / inputSize.height) + let normalizedOffsetFromOrigin = Position((inputSize.width - cropSizeInInput.width) / 2 / inputSize.width, + (inputSize.height - cropSizeInInput.height) / 2 / inputSize.height) + + let finalCropSize: Size + if inputSize.width < maxSize.width && inputSize.height < maxSize.height { + // inputSize is smaller, use cropSizeInInput as finalCropSize + finalCropSize = cropSizeInInput + } else { + // inputSize is larger, use maxSize as finalCropSize + finalCropSize = maxSize + } + return ResizeOutputInfo(finalCropSize: finalCropSize, normalizedCropSize: normalizedCropSize, normalizedOffsetFromOrigin: normalizedOffsetFromOrigin) +} + public func calculateResizeOutput(inputSize: Size, outputSize: Size?, scaleOutputSizeToFill: Bool) -> ResizeOutputInfo { let 
finalCropSize: Size let normalizedCropSize: Size diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index f05a0cbd..ed7ecd62 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -375,8 +375,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let inputSize = luminanceFramebuffer.sizeForTargetOrientation(.portrait).gpuSize let outputSize = self.outputBufferSize ?? luminanceFramebuffer.sizeForTargetOrientation(.portrait) - let resizeOutput = calculateResizeOutput(inputSize: inputSize, outputSize: self.outputBufferSize?.gpuSize, scaleOutputSizeToFill: false) - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:outputSize, textureOnly:false) + let resizeOutput = limitedSizeAndRatio(of: inputSize, to: outputSize.gpuSize) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(resizeOutput.finalCropSize), textureOnly:false) let conversionMatrix:Matrix3x3 if (self.supportsFullYUVRange) { From 1136938c2b06ecd15669365b48fd4b14a4fcbb17 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 4 Dec 2020 22:29:56 +0800 Subject: [PATCH 296/332] chore: update debug information --- framework/Source/Pipeline.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index b3f8e945..ff27ae3f 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -334,7 +334,7 @@ extension OperationGroup { return obj.debugNameForPipeline } - return "[\(simpleName(self)) -> \(inputImageRelay.debugPipelineDescription)]" + return "[\(simpleName(self))(\(inputImageRelay.debugPipelineDescription))]" } } From aed6d6155ebcaa4b3ad00e1129a85b7cd53a8654 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 10 Dec 2020 17:53:46 +0800 
Subject: [PATCH 297/332] fix(gapless): should consider seeking when call seek item method. --- framework/Source/iOS/MoviePlayer.swift | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index d73bf314..6774d133 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -177,8 +177,18 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func seekItem(_ item: AVPlayerItem, to time: CMTime, toleranceBefore: CMTime = .zero, toleranceAfter: CMTime = .zero, completionHandler: ((Bool) -> Void)? = nil) { - print("[MoviePlayer] seek item:\(item) to time:\(time.seconds) toleranceBefore:\(toleranceBefore.seconds) toleranceAfter:\(toleranceAfter.seconds)") - item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter, completionHandler: completionHandler) + print("[MoviePlayer] [player] seek item:\(item) to time:\(time.seconds) toleranceBefore:\(toleranceBefore.seconds) toleranceAfter:\(toleranceAfter.seconds)") + let seekCurrentItem = item != currentItem + guard !seekCurrentItem || !isSeeking else { return } + if seekCurrentItem { + isSeeking = true + } + item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter) { [weak self] success in + if seekCurrentItem { + self?.isSeeking = false + } + completionHandler?(success) + } didNotifyEndedItem = nil } From fb052b25afae8bf39041b891926a55ef75a77c7d Mon Sep 17 00:00:00 2001 From: Cokile Date: Wed, 9 Dec 2020 19:12:29 +0800 Subject: [PATCH 298/332] feat(framebuffer): can control should return to cache --- framework/Source/BasicOperation.swift | 4 ++-- framework/Source/Framebuffer.swift | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 86eb832d..384bb5f7 100755 --- a/framework/Source/BasicOperation.swift +++ 
b/framework/Source/BasicOperation.swift @@ -84,7 +84,7 @@ open class BasicOperation: ImageProcessingOperation { // MARK: - // MARK: Rendering - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + open func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if let previousFramebuffer = inputFramebuffers[fromSourceIndex] { previousFramebuffer.unlock() } @@ -144,7 +144,7 @@ open class BasicOperation: ImageProcessingOperation { } framebuffer.unlock() - } else if framebuffer.cache != nil { + } else if framebuffer.shouldReturnToCache && framebuffer.cache != nil { framebuffer.unlock() } else { remainingFramebuffers[key] = framebuffer diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index dc717005..0db9319c 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -170,6 +170,7 @@ public class Framebuffer: Hashable { // MARK: Framebuffer cache public weak var cache:FramebufferCache? + public var shouldReturnToCache = true var framebufferRetainCount = 0 public func lock() { framebufferRetainCount += 1 From bafff3a5d2e38d3a1f0916e3f8713d4d41b5edfc Mon Sep 17 00:00:00 2001 From: Cokile Date: Thu, 17 Dec 2020 20:43:01 +0800 Subject: [PATCH 299/332] fix(moviePlayer): fix seek will play previous frames --- framework/Source/iOS/MoviePlayer.swift | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 6774d133..397e32f2 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -53,6 +53,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { var timebaseInfo = mach_timebase_info_data_t() var totalFramesSent = 0 var totalFrameTime: Double = 0.0 + public var dropFrameBeforeTime: CMTime? public var playrate: Float = 1.0 public var assetDuration: CMTime { return asset?.duration ?? 
.zero @@ -178,10 +179,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func seekItem(_ item: AVPlayerItem, to time: CMTime, toleranceBefore: CMTime = .zero, toleranceAfter: CMTime = .zero, completionHandler: ((Bool) -> Void)? = nil) { print("[MoviePlayer] [player] seek item:\(item) to time:\(time.seconds) toleranceBefore:\(toleranceBefore.seconds) toleranceAfter:\(toleranceAfter.seconds)") - let seekCurrentItem = item != currentItem + let seekCurrentItem = item == currentItem guard !seekCurrentItem || !isSeeking else { return } if seekCurrentItem { isSeeking = true + dropFrameBeforeTime = time } item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter) { [weak self] success in if seekCurrentItem { @@ -198,6 +200,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public func replaceCurrentItem(with item: AVPlayerItem?, enableVideoOutput: Bool) { didNotifyEndedItem = nil + dropFrameBeforeTime = nil lastPlayerItem = item // Stop looping before replacing if shouldUseLooper && MoviePlayer.looperDict[self] != nil { @@ -328,6 +331,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { displayLink = nil isSeeking = false nextSeeking = nil + dropFrameBeforeTime = nil MoviePlayer.looperDict[self]?.disableLooping() MoviePlayer.looperDict[self] = nil } @@ -578,6 +582,15 @@ private extension MoviePlayer { return } + // There are still some previous frames coming after seeking. 
So we drop these frames + if let dropFrameBeforeTime = dropFrameBeforeTime, CMTimeCompare(timeForDisplay, dropFrameBeforeTime) <= 0 { + print("[MoviePlayer] drop frame at time:\(timeForDisplay.seconds), dropFrameBeforeTime:\(dropFrameBeforeTime.seconds)") + return + } + dropFrameBeforeTime = nil + +// print("[MoviePlayer] read frame at time:\(timeForDisplay.seconds)") + delegate?.moviePlayerDidReadPixelBuffer(pixelBuffer, time: timeForDisplay) let startTime = CACurrentMediaTime() From acd95246150c2f0b2e8185187b791734aaaf84d0 Mon Sep 17 00:00:00 2001 From: Cokile Date: Tue, 22 Dec 2020 11:47:01 +0800 Subject: [PATCH 300/332] feat(renderView): add api for cropFrame --- framework/Source/CameraConversion.swift | 2 +- framework/Source/iOS/RenderView.swift | 31 +++++++++++++++++++------ 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/framework/Source/CameraConversion.swift b/framework/Source/CameraConversion.swift index 93a5859f..17679979 100644 --- a/framework/Source/CameraConversion.swift +++ b/framework/Source/CameraConversion.swift @@ -40,7 +40,7 @@ public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuff } } - if let secondChrominanceFramebuffer = secondChrominanceFramebuffer, let secondChrominanceTextureProperties = secondChrominanceTextureProperties { + if let secondChrominanceTextureProperties = secondChrominanceTextureProperties { textureProperties = [luminanceTextureProperties, chrominanceTextureProperties, secondChrominanceTextureProperties] } else { textureProperties = [luminanceTextureProperties, chrominanceTextureProperties] diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 83315e87..9b507b70 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -17,7 +17,8 @@ public class RenderView:UIView, ImageConsumer { public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio public var 
orientation:ImageOrientation = .portrait - public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} + public var cropFrame: CGRect? + public var sizeInPixels:Size { Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor)) } public let sources = SourceContainer() public let maximumInputs:UInt = 1 @@ -174,7 +175,9 @@ public class RenderView:UIView, ImageConsumer { } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - let cleanup: () -> Void = { + let cleanup: () -> Void = { [weak self] in + guard let self = self else { return } + if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false) { DispatchQueue.main.async { self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) @@ -187,7 +190,9 @@ public class RenderView:UIView, ImageConsumer { } } - let work: () -> Void = { + let work: () -> Void = { [weak self] in + guard let self = self else { return } + // Fix crash when calling OpenGL when app is not foreground guard self.isAppForeground else { return } @@ -200,8 +205,22 @@ public class RenderView:UIView, ImageConsumer { clearFramebufferWithColor(self.backgroundRenderColor) + let inputTexture: InputTextureProperties + // RenderView will discard content outside cropFrame + // e.g.: renderView.bounds is (0, 0, 414, 805), the actual content size to be rendered is (420, 805) and will be rendered center aligned + // Instead of changing renderView.frame to (-3, 0, 420, 805), we can set cropFrame to (3, 0, 414, 805) + if let cropFrame = self.cropFrame, cropFrame != self.bounds { + let x: Float = max(0, Float(cropFrame.minX / self.bounds.width)) + let y: Float = max(0, Float(cropFrame.minY / self.bounds.height)) + let width: Float = max(0, min(Float(cropFrame.width / self.bounds.width), 1)) + let height: Float = max(0, min(Float(cropFrame.height 
/ self.bounds.height), 1)) + inputTexture = InputTextureProperties(textureCoordinates: Rotation.noRotation.croppedTextureCoordinates(offsetFromOrigin: .init(x, y), cropSize: .init(width: width, height: height)), texture: framebuffer.texture) + } else { + inputTexture = framebuffer.texturePropertiesForTargetOrientation(self.orientation) + } + let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:self.backingSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(self.orientation)]) + renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[inputTexture]) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) @@ -218,9 +237,7 @@ public class RenderView:UIView, ImageConsumer { DispatchQueue.main.async { self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) - sharedImageProcessingContext.runOperationAsynchronously { - work() - } + sharedImageProcessingContext.runOperationAsynchronously(work) } } else { From 958ae74601840e6a4366847a0605d185ac0950b0 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 31 Dec 2020 16:31:33 +0800 Subject: [PATCH 301/332] improve(player): return correct current item. 
--- framework/Source/iOS/MoviePlayer.swift | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 397e32f2..8a8bb664 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -108,7 +108,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { guard playerItems.count > 0 else { return nil } if didPlayToEnd { if playerItems.count == 1 { - return nil + if actionAtItemEnd == .advance { + return nil + } else { + return playerItems[0] + } } else { return playerItems[1] } @@ -424,6 +428,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public func setLoopEnabled(_ enabled: Bool, timeRange: CMTimeRange) { + print("MoviePlayer set loop enable: \(enabled) time range: \(timeRange)") if enabled { if previousPlayerActionAtItemEnd == nil { previousPlayerActionAtItemEnd = actionAtItemEnd From a52a739a32a1c626a9bbc0f26ee3ca8e1e9d6b4e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 22 Dec 2020 18:02:10 +0800 Subject: [PATCH 302/332] improve(MoviePlayer): allow seeking when is paused --- framework/Source/iOS/MoviePlayer.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 8a8bb664..7240640c 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -188,6 +188,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { if seekCurrentItem { isSeeking = true dropFrameBeforeTime = time + _setupDisplayLinkIfNeeded() } item.seek(to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter) { [weak self] success in if seekCurrentItem { @@ -353,6 +354,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } else { nextSeeking = SeekingInfo(time: targetTime, toleranceBefore: .zero, toleranceAfter: .zero, shouldPlayAfterSeeking: shouldPlayAfterSeeking) } + 
_setupDisplayLinkIfNeeded() if assetDuration <= .zero { print("[MoviePlayer] cannot seek since assetDuration is 0. currentItem:\(String(describing: currentItem))") } else { @@ -588,7 +590,7 @@ private extension MoviePlayer { } // There are still some previous frames coming after seeking. So we drop these frames - if let dropFrameBeforeTime = dropFrameBeforeTime, CMTimeCompare(timeForDisplay, dropFrameBeforeTime) <= 0 { + if isPlaying, let dropFrameBeforeTime = dropFrameBeforeTime, CMTimeCompare(timeForDisplay, dropFrameBeforeTime) <= 0 { print("[MoviePlayer] drop frame at time:\(timeForDisplay.seconds), dropFrameBeforeTime:\(dropFrameBeforeTime.seconds)") return } From dc1839eb63077b785549fe4c0097067efdd00194 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 11 Jan 2021 16:47:53 +0800 Subject: [PATCH 303/332] improve: can manually control captureSession configuration --- framework/Source/iOS/Camera.swift | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index ed7ecd62..f710777f 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -248,14 +248,16 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) } - public func configureDeviceInput(location: PhysicalCameraLocation, deviceType: AVCaptureDevice.DeviceType) { + public func configureDeviceInput(location: PhysicalCameraLocation, deviceType: AVCaptureDevice.DeviceType, skipConfiguration: Bool = false) { guard let device = location.device(deviceType) else { fatalError("ERROR: Can't find video devices for \(location)") } do { let newVideoInput = try AVCaptureDeviceInput(device: device) - captureSession.beginConfiguration() + if !skipConfiguration { + captureSession.beginConfiguration() + } captureSession.removeInput(videoInput) if 
captureSession.canAddInput(newVideoInput) { @@ -269,7 +271,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.addInput(videoInput) } - captureSession.commitConfiguration() + if !skipConfiguration { + captureSession.commitConfiguration() + } } catch let error { fatalError("ERROR: Could not init device: \(error)") } From 7d784d40401d1da5caaf8a8c563183e08145645b Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 19 Jan 2021 18:58:48 +0800 Subject: [PATCH 304/332] improve(Camera): can disable stabilization --- framework/Source/iOS/Camera.swift | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index f710777f..9ff5ea1a 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -98,14 +98,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var deviceType: AVCaptureDevice.DeviceType { return inputCamera.deviceType } - public var backCameraStableMode: AVCaptureVideoStabilizationMode = .standard { + public var backCameraStableMode: AVCaptureVideoStabilizationMode? { didSet { if location == .backFacing { configureStabilization() } } } - public var frontCameraStableMode: AVCaptureVideoStabilizationMode = .standard { + public var frontCameraStableMode: AVCaptureVideoStabilizationMode? { didSet { if location != .backFacing { configureStabilization() @@ -492,7 +492,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } private extension Camera { - static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput, stableMode: AVCaptureVideoStabilizationMode = .standard) { + static func updateVideoOutput(location: PhysicalCameraLocation, videoOutput: AVCaptureOutput, stableMode: AVCaptureVideoStabilizationMode? 
= nil) { for connection in videoOutput.connections { if connection.isVideoMirroringSupported { connection.isVideoMirrored = (location == .frontFacingMirrored) @@ -502,7 +502,7 @@ private extension Camera { connection.videoOrientation = .portrait } - if connection.isVideoStabilizationSupported { + if let stableMode = stableMode, connection.isVideoStabilizationSupported { connection.preferredVideoStabilizationMode = stableMode } From 766ba90dfcc07d59239bfb67646858b1adda33b9 Mon Sep 17 00:00:00 2001 From: Cokile Date: Wed, 20 Jan 2021 12:18:04 +0800 Subject: [PATCH 305/332] fix(renderView): resolve main thread checker issue --- framework/Source/iOS/RenderView.swift | 39 ++++++++++++++++----------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 9b507b70..545d5a04 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -25,6 +25,7 @@ public class RenderView:UIView, ImageConsumer { var displayFramebuffer:GLuint? var displayRenderbuffer:GLuint? 
var backingSize = GLSize(width:0, height:0) + var renderSize = CGSize.zero private var isAppForeground: Bool = true private lazy var displayShader:ShaderProgram = { @@ -52,14 +53,14 @@ public class RenderView:UIView, ImageConsumer { override public var bounds: CGRect { didSet { // Check if the size changed - destroyFramebufferOnSizeChanged(oldSize: oldValue.size, newSize: self.bounds.size) + updateAsSizeChange(oldSize: oldValue.size, newSize: self.bounds.size) } } override public var frame: CGRect { didSet { // Check if the size changed - destroyFramebufferOnSizeChanged(oldSize: oldValue.size, newSize: self.frame.size) + updateAsSizeChange(oldSize: oldValue.size, newSize: self.frame.size) } } @@ -79,6 +80,8 @@ public class RenderView:UIView, ImageConsumer { } self.internalLayer = eaglLayer + + self.renderSize = bounds.size } deinit { @@ -147,6 +150,19 @@ public class RenderView:UIView, ImageConsumer { return true } + func updateAsSizeChange(oldSize: CGSize, newSize: CGSize) { + if oldSize == newSize { return } + + sharedImageProcessingContext.runOperationAsynchronously { + self.updateRenderSize(newSize: newSize) + self.destroyDisplayFramebuffer() + } + } + + func updateRenderSize(newSize: CGSize) { + self.renderSize = newSize + } + func destroyDisplayFramebuffer() { if let displayFramebuffer = self.displayFramebuffer { var temporaryFramebuffer = displayFramebuffer @@ -160,15 +176,6 @@ public class RenderView:UIView, ImageConsumer { } } - func destroyFramebufferOnSizeChanged(oldSize: CGSize, newSize: CGSize) { - if(oldSize != newSize) { - // Destroy the displayFramebuffer so we render at the correct size for the next frame - sharedImageProcessingContext.runOperationAsynchronously{ - self.destroyDisplayFramebuffer() - } - } - } - func activateDisplayFramebuffer() { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), displayFramebuffer!) 
glViewport(0, 0, backingSize.width, backingSize.height) @@ -209,11 +216,11 @@ public class RenderView:UIView, ImageConsumer { // RenderView will discard content outside cropFrame // e.g.: renderView.bounds is (0, 0, 414, 805), the actual content size to be rendered is (420, 805) and will be rendered center aligned // Instead of changing renderView.frame to (-3, 0, 420, 805), we can set cropFrame to (3, 0, 414, 805) - if let cropFrame = self.cropFrame, cropFrame != self.bounds { - let x: Float = max(0, Float(cropFrame.minX / self.bounds.width)) - let y: Float = max(0, Float(cropFrame.minY / self.bounds.height)) - let width: Float = max(0, min(Float(cropFrame.width / self.bounds.width), 1)) - let height: Float = max(0, min(Float(cropFrame.height / self.bounds.height), 1)) + if let cropFrame = self.cropFrame, cropFrame != CGRect(origin: .zero, size: self.renderSize) { + let x: Float = max(0, Float(cropFrame.minX / self.renderSize.width)) + let y: Float = max(0, Float(cropFrame.minY / self.renderSize.height)) + let width: Float = max(0, min(Float(cropFrame.width / self.renderSize.width), 1)) + let height: Float = max(0, min(Float(cropFrame.height / self.renderSize.height), 1)) inputTexture = InputTextureProperties(textureCoordinates: Rotation.noRotation.croppedTextureCoordinates(offsetFromOrigin: .init(x, y), cropSize: .init(width: width, height: height)), texture: framebuffer.texture) } else { inputTexture = framebuffer.texturePropertiesForTargetOrientation(self.orientation) From 994c237baf0a2b3eecbb86cd3ef4accca9486047 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 31 Jan 2021 15:42:16 +0800 Subject: [PATCH 306/332] chore: improve code style by lint --- .../SimpleVideoFilter/Source/main.swift | 6 +- .../SimpleVideoFilter/Source/main.swift | 10 +- .../FilterShowcase/AppDelegate.swift | 6 +- .../FilterShowcase/FilterOperationTypes.swift | 35 +- .../FilterShowcase/FilterOperations.swift | 1326 ++++++++--------- .../FilterShowcaseWindowController.swift | 23 +- 
.../SimpleImageFilter/AppDelegate.swift | 10 +- .../SimpleMovieFilter/AppDelegate.swift | 10 +- .../SimpleVideoFilter/AppDelegate.swift | 10 +- .../SimpleVideoRecorder/AppDelegate.swift | 20 +- .../FilterShowcaseSwift/AppDelegate.swift | 2 - .../FilterDisplayViewController.swift | 17 +- .../FilterListViewController.swift | 8 +- .../SimpleImageFilter/AppDelegate.swift | 2 - .../SimpleImageFilter/ViewController.swift | 19 +- .../SimpleMovieEncoding/AppDelegate.swift | 4 - .../SimpleMovieEncoding/ViewController.swift | 80 +- .../SimpleMovieFilter/AppDelegate.swift | 4 - .../SimpleMovieFilter/ViewController.swift | 14 +- .../SimpleVideoFilter/AppDelegate.swift | 6 - .../SimpleVideoFilter/ViewController.swift | 10 +- .../SimpleVideoRecorder/AppDelegate.swift | 3 - .../SimpleVideoRecorder/ViewController.swift | 30 +- framework/Package.swift | 16 +- framework/Packages/CFreeGLUT/Package.swift | 1 + framework/Packages/COpenGL/Package.swift | 1 + framework/Packages/COpenGLES/Package.swift | 1 + framework/Packages/CVideo4Linux/Package.swift | 1 + framework/Packages/CVideoCore/Package.swift | 1 + framework/Source/BasicOperation.swift | 76 +- framework/Source/CameraConversion.swift | 32 +- framework/Source/Color.swift | 22 +- framework/Source/FillMode.swift | 37 +- framework/Source/Framebuffer.swift | 92 +- framework/Source/FramebufferCache.swift | 31 +- framework/Source/ImageGenerator.swift | 12 +- framework/Source/ImageOrientation.swift | 2 +- framework/Source/Linux/GLUTRenderWindow.swift | 20 +- .../Source/Linux/OpenGLContext-RPi.swift | 30 +- framework/Source/Linux/OpenGLContext.swift | 29 +- framework/Source/Linux/RPiRenderWindow.swift | 50 +- framework/Source/Linux/V4LCamera.swift | 2 +- framework/Source/Mac/Camera.swift | 112 +- framework/Source/Mac/MovieInput.swift | 79 +- framework/Source/Mac/MovieOutput.swift | 101 +- framework/Source/Mac/OpenGLContext.swift | 46 +- framework/Source/Mac/PictureInput.swift | 75 +- framework/Source/Mac/PictureOutput.swift | 60 +- 
framework/Source/Mac/RenderView.swift | 16 +- framework/Source/Matrix.swift | 37 +- framework/Source/OpenGLContext_Shared.swift | 65 +- framework/Source/OpenGLRendering.swift | 84 +- framework/Source/OperationGroup.swift | 16 +- .../Source/Operations/AdaptiveThreshold.swift | 6 +- framework/Source/Operations/AddBlend.swift | 5 +- framework/Source/Operations/AlphaBlend.swift | 8 +- .../Source/Operations/AmatorkaFilter.swift | 7 +- .../Operations/AverageColorExtractor.swift | 20 +- .../AverageLuminanceExtractor.swift | 16 +- .../AverageLuminanceThreshold.swift | 6 +- .../Source/Operations/BilateralBlur.swift | 6 +- framework/Source/Operations/BoxBlur.swift | 20 +- .../Operations/BrightnessAdjustment.swift | 8 +- .../Source/Operations/BulgeDistortion.swift | 14 +- .../Operations/CGAColorspaceFilter.swift | 4 +- .../Operations/CannyEdgeDetection.swift | 22 +- .../Source/Operations/ChromaKeyBlend.swift | 14 +- .../Source/Operations/ChromaKeying.swift | 14 +- .../Source/Operations/CircleGenerator.swift | 12 +- .../Source/Operations/ClosingFilter.swift | 6 +- framework/Source/Operations/ColorBlend.swift | 4 +- .../Source/Operations/ColorBurnBlend.swift | 4 +- .../Source/Operations/ColorDodgeBlend.swift | 4 +- .../Source/Operations/ColorInversion.swift | 4 +- .../Operations/ColorLocalBinaryPattern.swift | 4 +- .../Source/Operations/ColorMatrixFilter.swift | 11 +- .../ColourFASTFeatureDetection.swift | 8 +- .../Operations/ContrastAdjustment.swift | 8 +- .../Source/Operations/Convolution3x3.swift | 6 +- framework/Source/Operations/Crop.swift | 26 +- .../Operations/CrosshairGenerator.swift | 22 +- framework/Source/Operations/Crosshatch.swift | 12 +- framework/Source/Operations/DarkenBlend.swift | 4 +- .../Source/Operations/DifferenceBlend.swift | 4 +- framework/Source/Operations/Dilation.swift | 18 +- .../Source/Operations/DissolveBlend.swift | 8 +- framework/Source/Operations/DivideBlend.swift | 4 +- .../Source/Operations/EmbossFilter.swift | 10 +- 
framework/Source/Operations/Erosion.swift | 18 +- .../Source/Operations/ExclusionBlend.swift | 4 +- .../Operations/ExposureAdjustment.swift | 8 +- framework/Source/Operations/FalseColor.swift | 10 +- .../Source/Operations/GammaAdjustment.swift | 8 +- .../Source/Operations/GaussianBlur.swift | 71 +- .../Operations/GlassSphereRefraction.swift | 16 +- framework/Source/Operations/Halftone.swift | 8 +- .../Source/Operations/HardLightBlend.swift | 4 +- .../Operations/HarrisCornerDetector.swift | 34 +- framework/Source/Operations/Haze.swift | 12 +- .../Source/Operations/HighPassFilter.swift | 6 +- .../Operations/HighlightAndShadowTint.swift | 18 +- .../Operations/HighlightsAndShadows.swift | 12 +- framework/Source/Operations/Histogram.swift | 26 +- .../Source/Operations/HistogramDisplay.swift | 4 +- .../Operations/HistogramEqualization.swift | 33 +- .../Source/Operations/HueAdjustment.swift | 8 +- framework/Source/Operations/HueBlend.swift | 4 +- framework/Source/Operations/ImageBuffer.swift | 10 +- .../Source/Operations/KuwaharaFilter.swift | 8 +- .../Operations/KuwaharaRadius3Filter.swift | 4 +- .../Source/Operations/LanczosResampling.swift | 14 +- framework/Source/Operations/Laplacian.swift | 4 +- .../Source/Operations/LevelsAdjustment.swift | 24 +- .../Source/Operations/LightenBlend.swift | 4 +- .../Source/Operations/LineGenerator.swift | 29 +- .../Source/Operations/LinearBurnBlend.swift | 4 +- .../Operations/LocalBinaryPattern.swift | 4 +- .../Source/Operations/LookupFilter.swift | 10 +- .../Source/Operations/LowPassFilter.swift | 6 +- framework/Source/Operations/Luminance.swift | 4 +- .../Operations/LuminanceRangeReduction.swift | 8 +- .../Operations/LuminanceThreshold.swift | 8 +- .../Source/Operations/LuminosityBlend.swift | 4 +- .../Source/Operations/MedianFilter.swift | 4 +- .../Source/Operations/MissEtikateFilter.swift | 5 +- .../Source/Operations/MonochromeFilter.swift | 12 +- framework/Source/Operations/MotionBlur.swift | 12 +- 
.../Source/Operations/MotionDetector.swift | 8 +- .../Source/Operations/MultiplyBlend.swift | 4 +- .../Operations/NobleCornerDetector.swift | 4 +- framework/Source/Operations/NormalBlend.swift | 4 +- .../Source/Operations/OpacityAdjustment.swift | 8 +- .../Source/Operations/OpeningFilter.swift | 6 +- .../Source/Operations/OverlayBlend.swift | 4 +- .../Source/Operations/PinchDistortion.swift | 14 +- framework/Source/Operations/Pixellate.swift | 8 +- .../Source/Operations/PolarPixellate.swift | 10 +- framework/Source/Operations/PolkaDot.swift | 12 +- framework/Source/Operations/Posterize.swift | 8 +- .../Operations/PrewittEdgeDetection.swift | 8 +- .../Operations/RGBAdjustmentFilter.swift | 16 +- framework/Source/Operations/ResizeCrop.swift | 10 +- .../Operations/SaturationAdjustment.swift | 8 +- .../Source/Operations/SaturationBlend.swift | 4 +- framework/Source/Operations/ScreenBlend.swift | 4 +- .../Source/Operations/SepiaToneFilter.swift | 6 +- framework/Source/Operations/Sharpen.swift | 12 +- .../Operations/ShiTomasiFeatureDetector.swift | 4 +- .../SingleComponentGaussianBlur.swift | 22 +- .../Source/Operations/SketchFilter.swift | 8 +- .../Source/Operations/SmoothToonFilter.swift | 10 +- .../Operations/SobelEdgeDetection.swift | 8 +- .../Source/Operations/SoftElegance.swift | 9 +- .../Source/Operations/SoftLightBlend.swift | 4 +- framework/Source/Operations/Solarize.swift | 8 +- .../Operations/SolidColorGenerator.swift | 3 +- .../Source/Operations/SourceOverBlend.swift | 4 +- .../Source/Operations/SphereRefraction.swift | 16 +- .../Source/Operations/StretchDistortion.swift | 6 +- .../Source/Operations/SubtractBlend.swift | 4 +- .../Source/Operations/SwirlDistortion.swift | 14 +- .../Source/Operations/ThresholdSketch.swift | 10 +- .../ThresholdSobelEdgeDetection.swift | 12 +- framework/Source/Operations/TiltShift.swift | 22 +- framework/Source/Operations/ToonFilter.swift | 12 +- .../Operations/TransformOperation.swift | 31 +- 
framework/Source/Operations/UnsharpMask.swift | 8 +- framework/Source/Operations/Vibrance.swift | 8 +- framework/Source/Operations/Vignette.swift | 18 +- .../Source/Operations/WhiteBalance.swift | 12 +- framework/Source/Operations/ZoomBlur.swift | 10 +- framework/Source/Operations/iOSBlur.swift | 16 +- framework/Source/Pipeline.swift | 120 +- framework/Source/Position.swift | 10 +- framework/Source/RawDataInput.swift | 7 +- framework/Source/RawDataOutput.swift | 12 +- framework/Source/SerialDispatch.swift | 38 +- framework/Source/ShaderProgram.swift | 137 +- framework/Source/ShaderUniformSettings.swift | 40 +- framework/Source/Size.swift | 6 +- framework/Source/TextureInput.swift | 10 +- framework/Source/TextureOutput.swift | 6 +- .../Source/TextureSamplingOperation.swift | 10 +- framework/Source/Timestamp.swift | 30 +- framework/Source/TwoStageOperation.swift | 36 +- framework/Source/iOS/CILookupFilter.swift | 12 +- framework/Source/iOS/Camera.swift | 135 +- .../Source/iOS/FramebufferGenerator.swift | 22 +- framework/Source/iOS/MovieCache.swift | 18 +- framework/Source/iOS/MovieInput.swift | 150 +- framework/Source/iOS/MovieOutput.swift | 104 +- framework/Source/iOS/MoviePlayer.swift | 14 +- framework/Source/iOS/OpenGLContext.swift | 57 +- framework/Source/iOS/PictureInput.swift | 77 +- framework/Source/iOS/PictureOutput.swift | 64 +- framework/Source/iOS/RenderView.swift | 68 +- framework/Source/iOS/SpeakerOutput.swift | 87 +- framework/Tests/Pipeline_Tests.swift | 36 +- framework/Tests/ShaderProgram_Tests.swift | 10 +- 199 files changed, 2636 insertions(+), 2750 deletions(-) diff --git a/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift b/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift index 85703e56..7f753bd5 100755 --- a/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift +++ b/examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift @@ -1,11 +1,11 @@ import GPUImage // For now, GLUT initialization is done in the render window, so that 
must come first in sequence -let renderWindow = GLUTRenderWindow(width:1280, height:720, title:"Simple Video Filter") -let camera = V4LCamera(size:Size(width:1280.0, height:720.0)) +let renderWindow = GLUTRenderWindow(width: 1280, height: 720, title: "Simple Video Filter") +let camera = V4LCamera(size: Size(width: 1280.0, height: 720.0)) let edgeDetection = SobelEdgeDetection() camera --> edgeDetection --> renderWindow camera.startCapture() -renderWindow.loopWithFunction(camera.grabFrame) \ No newline at end of file +renderWindow.loopWithFunction(camera.grabFrame) diff --git a/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift b/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift index f33fa9ea..16f4a639 100755 --- a/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift +++ b/examples/Linux-RPi/SimpleVideoFilter/Source/main.swift @@ -1,15 +1,15 @@ import GPUImage // For now, rendering requires the window to be created first -let renderWindow = RPiRenderWindow(width:1280, height:720) -let camera = V4LCamera(size:Size(width:1280.0, height:720.0)) +let renderWindow = RPiRenderWindow(width: 1280, height: 720) +let camera = V4LCamera(size: Size(width: 1280.0, height: 720.0)) let edgeDetection = SobelEdgeDetection() camera --> edgeDetection --> renderWindow -var terminate:Int = 0 +var terminate: Int = 0 camera.startCapture() -while (terminate == 0) { +while terminate == 0 { camera.grabFrame() -} \ No newline at end of file +} diff --git a/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift b/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift index 19f72e6a..4cc95db4 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift @@ -2,14 +2,12 @@ import Cocoa @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { - @IBOutlet weak var window: NSWindow! - var windowController:FilterShowcaseWindowController? 
+ var windowController: FilterShowcaseWindowController? func applicationDidFinishLaunching(_ aNotification: Notification) { - self.windowController = FilterShowcaseWindowController(windowNibName:"FilterShowcaseWindowController") + self.windowController = FilterShowcaseWindowController(windowNibName: "FilterShowcaseWindowController") self.windowController?.showWindow(self) } } - diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift index bd479310..21841e2a 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift @@ -3,7 +3,7 @@ import GPUImage enum FilterSliderSetting { case disabled - case enabled(minimumValue:Float, maximumValue:Float, initialValue:Float) + case enabled(minimumValue: Float, maximumValue: Float, initialValue: Float) } typealias FilterSetupFunction = (Camera, ImageProcessingOperation, RenderView) -> ImageSource? @@ -11,33 +11,33 @@ typealias FilterSetupFunction = (Camera, ImageProcessingOperation, RenderView) - enum FilterOperationType { case singleInput case blend - case custom(filterSetupFunction:FilterSetupFunction) + case custom(filterSetupFunction: FilterSetupFunction) } protocol FilterOperationInterface { var filter: ImageProcessingOperation { get } - var secondInput:ImageSource? { get } + var secondInput: ImageSource? { get } var listName: String { get } var titleName: String { get } - var sliderConfiguration: FilterSliderSetting { get } - var filterOperationType: FilterOperationType { get } + var sliderConfiguration: FilterSliderSetting { get } + var filterOperationType: FilterOperationType { get } - func configureCustomFilter(_ secondInput:ImageSource?) - func updateBasedOnSliderValue(_ sliderValue:Float) + func configureCustomFilter(_ secondInput: ImageSource?) 
+ func updateBasedOnSliderValue(_ sliderValue: Float) } class FilterOperation: FilterOperationInterface { - lazy var internalFilter:FilterClass = { + lazy var internalFilter: FilterClass = { return self.filterCreationFunction() }() let filterCreationFunction:() -> FilterClass - var secondInput:ImageSource? - let listName:String - let titleName:String - let sliderConfiguration:FilterSliderSetting - let filterOperationType:FilterOperationType - let sliderUpdateCallback: ((FilterClass, Float) -> ())? - init(filter:@escaping () -> FilterClass, listName: String, titleName: String, sliderConfiguration: FilterSliderSetting, sliderUpdateCallback:((FilterClass, Float) -> ())?, filterOperationType: FilterOperationType) { + var secondInput: ImageSource? + let listName: String + let titleName: String + let sliderConfiguration: FilterSliderSetting + let filterOperationType: FilterOperationType + let sliderUpdateCallback: ((FilterClass, Float) -> Void)? + init(filter:@escaping () -> FilterClass, listName: String, titleName: String, sliderConfiguration: FilterSliderSetting, sliderUpdateCallback: ((FilterClass, Float) -> Void)?, filterOperationType: FilterOperationType) { self.listName = listName self.titleName = titleName self.sliderConfiguration = sliderConfiguration @@ -50,12 +50,11 @@ class FilterOperation: FilterOperationInt return internalFilter } - func configureCustomFilter(_ secondInput:ImageSource?) { + func configureCustomFilter(_ secondInput: ImageSource?) 
{ self.secondInput = secondInput } - func updateBasedOnSliderValue(_ sliderValue:Float) { + func updateBasedOnSliderValue(_ sliderValue: Float) { sliderUpdateCallback?(internalFilter, sliderValue) } } - diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index ebd082ed..ce850fd9 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -1,16 +1,16 @@ import GPUImage import QuartzCore -let filterOperations: Array = [ +let filterOperations: [FilterOperationInterface] = [ FilterOperation( - filter:{AlphaBlend()}, - listName:"Highlights Blur", - titleName:"Gaussian Blur Lumi>0.6(alpha)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.8), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { AlphaBlend() }, + listName: "Highlights Blur", + titleName: "Gaussian Blur Lumi>0.6(alpha)", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.8), + sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let blendFilter = filter as! 
AlphaBlend blendFilter.removeAllSources() @@ -21,14 +21,14 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{AlphaBlend()}, - listName:"Soft Focus", - titleName:"Gaussian Blur + Alpha Blend", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { AlphaBlend() }, + listName: "Soft Focus", + titleName: "Gaussian Blur + Alpha Blend", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let blendFilter = filter as! AlphaBlend blendFilter.removeAllSources() @@ -39,149 +39,149 @@ let filterOperations: Array = [ return blendFilter }) ), - FilterOperation ( - filter:{SaturationAdjustment()}, - listName:"Saturation", - titleName:"Saturation", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + FilterOperation( + filter: { SaturationAdjustment() }, + listName: "Saturation", + titleName: "Saturation", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.saturation = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ContrastAdjustment()}, - listName:"Contrast", - titleName:"Contrast", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ContrastAdjustment() }, + listName: "Contrast", + titleName: "Contrast", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.contrast = 
sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BrightnessAdjustment()}, - listName:"Brightness", - titleName:"Brightness", - sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BrightnessAdjustment() }, + listName: "Brightness", + titleName: "Brightness", + sliderConfiguration: .enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in filter.brightness = sliderValue }, - filterOperationType:.singleInput - ), - FilterOperation( - filter:{LevelsAdjustment()}, - listName:"Levels", - titleName:"Levels", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in - filter.minimum = Color(red:Float(sliderValue), green:Float(sliderValue), blue:Float(sliderValue)) - filter.middle = Color(red:1.0, green:1.0, blue:1.0) - filter.maximum = Color(red:1.0, green:1.0, blue:1.0) - filter.minOutput = Color(red:0.0, green:0.0, blue:0.0) - filter.maxOutput = Color(red:1.0, green:1.0, blue:1.0) + filterOperationType: .singleInput + ), + FilterOperation( + filter: { LevelsAdjustment() }, + listName: "Levels", + titleName: "Levels", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in + filter.minimum = Color(red: Float(sliderValue), green: Float(sliderValue), blue: Float(sliderValue)) + filter.middle = Color(red: 1.0, green: 1.0, blue: 1.0) + filter.maximum = Color(red: 1.0, green: 1.0, blue: 1.0) + filter.minOutput = Color(red: 0.0, green: 0.0, blue: 0.0) + filter.maxOutput = Color(red: 1.0, green: 1.0, blue: 1.0) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ExposureAdjustment()}, - listName:"Exposure", - titleName:"Exposure", - 
sliderConfiguration:.enabled(minimumValue:-4.0, maximumValue:4.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ExposureAdjustment() }, + listName: "Exposure", + titleName: "Exposure", + sliderConfiguration: .enabled(minimumValue:-4.0, maximumValue:4.0, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in filter.exposure = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{RGBAdjustment()}, - listName:"RGB", - titleName:"RGB", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { RGBAdjustment() }, + listName: "RGB", + titleName: "RGB", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.green = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HueAdjustment()}, - listName:"Hue", - titleName:"Hue", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:360.0, initialValue:90.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HueAdjustment() }, + listName: "Hue", + titleName: "Hue", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:360.0, initialValue:90.0), + sliderUpdateCallback: {filter, sliderValue in filter.hue = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{WhiteBalance()}, - listName:"White balance", - titleName:"White Balance", - sliderConfiguration:.enabled(minimumValue:2500.0, maximumValue:7500.0, initialValue:5000.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { WhiteBalance() }, + listName: "White balance", + titleName: "White Balance", + sliderConfiguration: .enabled(minimumValue:2500.0, maximumValue:7500.0, initialValue:5000.0), + sliderUpdateCallback: {filter, sliderValue in 
filter.temperature = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{MonochromeFilter()}, - listName:"Monochrome", - titleName:"Monochrome", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { MonochromeFilter() }, + listName: "Monochrome", + titleName: "Monochrome", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! MonochromeFilter camera --> castFilter --> outputView - castFilter.color = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0) + castFilter.color = Color(red: 0.0, green: 0.0, blue: 1.0, alpha: 1.0) return nil }) ), FilterOperation( - filter:{FalseColor()}, - listName:"False color", - titleName:"False Color", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { FalseColor() }, + listName: "False color", + titleName: "False Color", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{Sharpen()}, - listName:"Sharpen", - titleName:"Sharpen", - sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:4.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Sharpen() }, + listName: "Sharpen", + titleName: "Sharpen", + sliderConfiguration: .enabled(minimumValue:-1.0, maximumValue:4.0, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in filter.sharpness = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{UnsharpMask()}, - listName:"Unsharp mask", - 
titleName:"Unsharp Mask", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { UnsharpMask() }, + listName: "Unsharp mask", + titleName: "Unsharp Mask", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{TransformOperation()}, - listName:"Transform (2-D)", - titleName:"Transform (2-D)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), - sliderUpdateCallback:{(filter, sliderValue) in - filter.transform = Matrix4x4(CGAffineTransform(rotationAngle:CGFloat(sliderValue))) + filter: { TransformOperation() }, + listName: "Transform (2-D)", + titleName: "Transform (2-D)", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), + sliderUpdateCallback: {filter, sliderValue in + filter.transform = Matrix4x4(CGAffineTransform(rotationAngle: CGFloat(sliderValue))) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{TransformOperation()}, - listName:"Transform (3-D)", - titleName:"Transform (3-D)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), - sliderUpdateCallback:{(filter, sliderValue) in + filter: { TransformOperation() }, + listName: "Transform (3-D)", + titleName: "Transform (3-D)", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), + sliderUpdateCallback: {filter, sliderValue in var perspectiveTransform = CATransform3DIdentity perspectiveTransform.m34 = 0.4 perspectiveTransform.m33 = 0.4 @@ -189,37 +189,37 @@ let filterOperations: Array = [ perspectiveTransform = CATransform3DRotate(perspectiveTransform, CGFloat(sliderValue), 0.0, 1.0, 0.0) filter.transform = 
Matrix4x4(perspectiveTransform) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Crop()}, - listName:"Crop", - titleName:"Crop", - sliderConfiguration:.enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), - sliderUpdateCallback:{(filter, sliderValue) in - filter.cropSizeInPixels = Size(width:480.0, height:sliderValue) + filter: { Crop() }, + listName: "Crop", + titleName: "Crop", + sliderConfiguration: .enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), + sliderUpdateCallback: {filter, sliderValue in + filter.cropSizeInPixels = Size(width: 480.0, height: sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ResizeCrop()}, - listName:"ResizeCrop", - titleName:"ResizeCrop", - sliderConfiguration:.enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), - sliderUpdateCallback:{(filter, sliderValue) in - filter.cropSizeInPixels = Size(width:480.0, height:sliderValue) - }, - filterOperationType:.singleInput + filter: { ResizeCrop() }, + listName: "ResizeCrop", + titleName: "ResizeCrop", + sliderConfiguration: .enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), + sliderUpdateCallback: {filter, sliderValue in + filter.cropSizeInPixels = Size(width: 480.0, height: sliderValue) + }, + filterOperationType: .singleInput ), FilterOperation( - filter:{Luminance()}, - listName:"Masking", - titleName:"Mask Example", - sliderConfiguration:.disabled, + filter: { Luminance() }, + listName: "Masking", + titleName: "Mask Example", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Luminance - let maskImage = try! PictureInput(imageName:"Mask.png") + let maskImage = try! 
PictureInput(imageName: "Mask.png") castFilter.drawUnmodifiedImageOutsideOfMask = false castFilter.mask = maskImage maskImage.processImage() @@ -228,128 +228,128 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{GammaAdjustment()}, - listName:"Gamma", - titleName:"Gamma", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:3.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { GammaAdjustment() }, + listName: "Gamma", + titleName: "Gamma", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:3.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.gamma = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), // TODO : Tone curve FilterOperation( - filter:{HighlightsAndShadows()}, - listName:"Highlights and shadows", - titleName:"Highlights and Shadows", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HighlightsAndShadows() }, + listName: "Highlights and shadows", + titleName: "Highlights and Shadows", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.highlights = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Haze()}, - listName:"Haze / UV", - titleName:"Haze / UV", - sliderConfiguration:.enabled(minimumValue:-0.2, maximumValue:0.2, initialValue:0.2), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Haze() }, + listName: "Haze / UV", + titleName: "Haze / UV", + sliderConfiguration: .enabled(minimumValue:-0.2, maximumValue:0.2, initialValue:0.2), + sliderUpdateCallback: {filter, sliderValue in filter.distance = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SepiaToneFilter()}, - listName:"Sepia tone", - 
titleName:"Sepia Tone", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SepiaToneFilter() }, + listName: "Sepia tone", + titleName: "Sepia Tone", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{AmatorkaFilter()}, - listName:"Amatorka (Lookup)", - titleName:"Amatorka (Lookup)", - sliderConfiguration:.disabled, + filter: { AmatorkaFilter() }, + listName: "Amatorka (Lookup)", + titleName: "Amatorka (Lookup)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{MissEtikateFilter()}, - listName:"Miss Etikate (Lookup)", - titleName:"Miss Etikate (Lookup)", - sliderConfiguration:.disabled, + filter: { MissEtikateFilter() }, + listName: "Miss Etikate (Lookup)", + titleName: "Miss Etikate (Lookup)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SoftElegance()}, - listName:"Soft elegance (Lookup)", - titleName:"Soft Elegance (Lookup)", - sliderConfiguration:.disabled, + filter: { SoftElegance() }, + listName: "Soft elegance (Lookup)", + titleName: "Soft Elegance (Lookup)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ColorInversion()}, - listName:"Color invert", - titleName:"Color Invert", - sliderConfiguration:.disabled, + filter: { ColorInversion() }, + listName: "Color invert", + titleName: "Color Invert", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + 
filterOperationType: .singleInput ), FilterOperation( - filter:{Solarize()}, - listName:"Solarize", - titleName:"Solarize", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Solarize() }, + listName: "Solarize", + titleName: "Solarize", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Vibrance()}, - listName:"Vibrance", - titleName:"Vibrance", - sliderConfiguration:.enabled(minimumValue:-1.2, maximumValue:1.2, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Vibrance() }, + listName: "Vibrance", + titleName: "Vibrance", + sliderConfiguration: .enabled(minimumValue:-1.2, maximumValue:1.2, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in filter.vibrance = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HighlightAndShadowTint()}, - listName:"Highlight and shadow tint", - titleName:"Highlight / Shadow Tint", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HighlightAndShadowTint() }, + listName: "Highlight and shadow tint", + titleName: "Highlight / Shadow Tint", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in filter.shadowTintIntensity = sliderValue }, - filterOperationType:.singleInput - ), - FilterOperation ( - filter:{Luminance()}, - listName:"Luminance", - titleName:"Luminance", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput - ), - FilterOperation( - filter:{Histogram(type:.rgb)}, - listName:"Histogram", 
- titleName:"Histogram", - sliderConfiguration:.enabled(minimumValue:4.0, maximumValue:32.0, initialValue:16.0), - sliderUpdateCallback: {(filter, sliderValue) in + filterOperationType: .singleInput + ), + FilterOperation( + filter: { Luminance() }, + listName: "Luminance", + titleName: "Luminance", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput + ), + FilterOperation( + filter: { Histogram(type: .rgb) }, + listName: "Histogram", + titleName: "Histogram", + sliderConfiguration: .enabled(minimumValue:4.0, maximumValue:32.0, initialValue:16.0), + sliderUpdateCallback: {filter, sliderValue in filter.downsamplingFactor = UInt(round(sliderValue)) }, - filterOperationType:.custom(filterSetupFunction: {(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Histogram let histogramGraph = HistogramDisplay() - histogramGraph.overriddenOutputSize = Size(width:256.0, height:330.0) + histogramGraph.overriddenOutputSize = Size(width: 256.0, height: 330.0) let blendFilter = AlphaBlend() blendFilter.mix = 0.75 camera --> blendFilter @@ -358,23 +358,23 @@ let filterOperations: Array = [ return blendFilter }) ), - FilterOperation ( - filter:{HistogramEqualization(type:.rgb)}, - listName:"Histogram equalization", - titleName:"Histogram Equalization", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + FilterOperation( + filter: { HistogramEqualization(type: .rgb) }, + listName: "Histogram equalization", + titleName: "Histogram Equalization", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{AverageColorExtractor()}, - listName:"Average color", - titleName:"Average Color", - sliderConfiguration:.disabled, + filter: { AverageColorExtractor() }, + listName: "Average color", + titleName: "Average Color", + sliderConfiguration: 
.disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! AverageColorExtractor - let colorGenerator = SolidColorGenerator(size:outputView.sizeInPixels) + let colorGenerator = SolidColorGenerator(size: outputView.sizeInPixels) castFilter.extractedColorCallback = {color in colorGenerator.renderColor(color) @@ -385,17 +385,17 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{AverageLuminanceExtractor()}, - listName:"Average luminosity", - titleName:"Average Luminosity", - sliderConfiguration:.disabled, + filter: { AverageLuminanceExtractor() }, + listName: "Average luminosity", + titleName: "Average Luminosity", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
AverageLuminanceExtractor - let colorGenerator = SolidColorGenerator(size:outputView.sizeInPixels) + let colorGenerator = SolidColorGenerator(size: outputView.sizeInPixels) castFilter.extractedLuminanceCallback = {luminosity in - colorGenerator.renderColor(Color(red:luminosity, green:luminosity, blue:luminosity)) + colorGenerator.renderColor(Color(red: luminosity, green: luminosity, blue: luminosity)) } camera --> castFilter @@ -404,161 +404,161 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{LuminanceThreshold()}, - listName:"Luminance threshold", - titleName:"Luminance Threshold", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { LuminanceThreshold() }, + listName: "Luminance threshold", + titleName: "Luminance Threshold", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{AdaptiveThreshold()}, - listName:"Adaptive threshold", - titleName:"Adaptive Threshold", - sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:20.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { AdaptiveThreshold() }, + listName: "Adaptive threshold", + titleName: "Adaptive Threshold", + sliderConfiguration: .enabled(minimumValue:1.0, maximumValue:20.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{AverageLuminanceThreshold()}, - listName:"Average luminance threshold", - titleName:"Avg. Lum. 
Threshold", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { AverageLuminanceThreshold() }, + listName: "Average luminance threshold", + titleName: "Avg. Lum. Threshold", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.thresholdMultiplier = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Pixellate()}, - listName:"Pixellate", - titleName:"Pixellate", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Pixellate() }, + listName: "Pixellate", + titleName: "Pixellate", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), + sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{PolarPixellate()}, - listName:"Polar pixellate", - titleName:"Polar Pixellate", - sliderConfiguration:.enabled(minimumValue:-0.1, maximumValue:0.1, initialValue:0.05), - sliderUpdateCallback: {(filter, sliderValue) in - filter.pixelSize = Size(width:sliderValue, height:sliderValue) + filter: { PolarPixellate() }, + listName: "Polar pixellate", + titleName: "Polar Pixellate", + sliderConfiguration: .enabled(minimumValue:-0.1, maximumValue:0.1, initialValue:0.05), + sliderUpdateCallback: {filter, sliderValue in + filter.pixelSize = Size(width: sliderValue, height: sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Pixellate()}, - listName:"Masked Pixellate", - titleName:"Masked Pixellate", - sliderConfiguration:.disabled, + filter: { Pixellate() }, + listName: "Masked Pixellate", + titleName: "Masked 
Pixellate", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Pixellate castFilter.fractionalWidthOfAPixel = 0.05 // TODO: Find a way to not hardcode these values #if os(iOS) - let circleGenerator = CircleGenerator(size:Size(width:480, height:640)) + let circleGenerator = CircleGenerator(size: Size(width: 480, height: 640)) #else - let circleGenerator = CircleGenerator(size:Size(width:1280, height:720)) + let circleGenerator = CircleGenerator(size: Size(width: 1280, height: 720)) #endif castFilter.mask = circleGenerator - circleGenerator.renderCircleOfRadius(0.25, center:Position.center, circleColor:Color.white, backgroundColor:Color.transparent) + circleGenerator.renderCircleOfRadius(0.25, center: Position.center, circleColor: Color.white, backgroundColor: Color.transparent) camera --> castFilter --> outputView return nil }) ), FilterOperation( - filter:{PolkaDot()}, - listName:"Polka dot", - titleName:"Polka Dot", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { PolkaDot() }, + listName: "Polka dot", + titleName: "Polka Dot", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), + sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Halftone()}, - listName:"Halftone", - titleName:"Halftone", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.05, initialValue:0.01), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Halftone() }, + listName: "Halftone", + titleName: "Halftone", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:0.05, initialValue:0.01), + 
sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Crosshatch()}, - listName:"Crosshatch", - titleName:"Crosshatch", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.06, initialValue:0.03), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Crosshatch() }, + listName: "Crosshatch", + titleName: "Crosshatch", + sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.06, initialValue:0.03), + sliderUpdateCallback: {filter, sliderValue in filter.crossHatchSpacing = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SobelEdgeDetection()}, - listName:"Sobel edge detection", - titleName:"Sobel Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SobelEdgeDetection() }, + listName: "Sobel edge detection", + titleName: "Sobel Edge Detection", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), + sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{PrewittEdgeDetection()}, - listName:"Prewitt edge detection", - titleName:"Prewitt Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { PrewittEdgeDetection() }, + listName: "Prewitt edge detection", + titleName: "Prewitt Edge Detection", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - 
filter:{CannyEdgeDetection()}, - listName:"Canny edge detection", - titleName:"Canny Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { CannyEdgeDetection() }, + listName: "Canny edge detection", + titleName: "Canny Edge Detection", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ThresholdSobelEdgeDetection()}, - listName:"Threshold edge detection", - titleName:"Threshold Edge Detection", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ThresholdSobelEdgeDetection() }, + listName: "Threshold edge detection", + titleName: "Threshold Edge Detection", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HarrisCornerDetector()}, - listName:"Harris corner detector", - titleName:"Harris Corner Detector", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HarrisCornerDetector() }, + listName: "Harris corner detector", + titleName: "Harris Corner Detector", + sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
HarrisCornerDetector // TODO: Get this more dynamically sized #if os(iOS) - let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 480, height: 640)) #else - let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 1280, height: 720)) #endif crosshairGenerator.crosshairWidth = 15.0 @@ -576,20 +576,20 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{NobleCornerDetector()}, - listName:"Noble corner detector", - titleName:"Noble Corner Detector", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { NobleCornerDetector() }, + listName: "Noble corner detector", + titleName: "Noble Corner Detector", + sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
NobleCornerDetector // TODO: Get this more dynamically sized #if os(iOS) - let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 480, height: 640)) #else - let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 1280, height: 720)) #endif crosshairGenerator.crosshairWidth = 15.0 @@ -607,20 +607,20 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{ShiTomasiFeatureDetector()}, - listName:"Shi-Tomasi feature detector", - titleName:"Shi-Tomasi Feature Detector", - sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ShiTomasiFeatureDetector() }, + listName: "Shi-Tomasi feature detector", + titleName: "Shi-Tomasi Feature Detector", + sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
ShiTomasiFeatureDetector // TODO: Get this more dynamically sized #if os(iOS) - let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 480, height: 640)) #else - let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720)) + let crosshairGenerator = CrosshairGenerator(size: Size(width: 1280, height: 720)) #endif crosshairGenerator.crosshairWidth = 15.0 @@ -639,112 +639,112 @@ let filterOperations: Array = [ ), // TODO: Hough transform line detector FilterOperation( - filter:{ColourFASTFeatureDetection()}, - listName:"ColourFAST feature detection", - titleName:"ColourFAST Features", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { ColourFASTFeatureDetection() }, + listName: "ColourFAST feature detection", + titleName: "ColourFAST Features", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{LowPassFilter()}, - listName:"Low pass", - titleName:"Low Pass", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { LowPassFilter() }, + listName: "Low pass", + titleName: "Low Pass", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.strength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{HighPassFilter()}, - listName:"High pass", - titleName:"High Pass", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { HighPassFilter() }, + listName: "High pass", + titleName: "High Pass", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, 
sliderValue in filter.strength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), // TODO: Motion detector FilterOperation( - filter:{SketchFilter()}, - listName:"Sketch", - titleName:"Sketch", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SketchFilter() }, + listName: "Sketch", + titleName: "Sketch", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ThresholdSketchFilter()}, - listName:"Threshold Sketch", - titleName:"Threshold Sketch", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ThresholdSketchFilter() }, + listName: "Threshold Sketch", + titleName: "Threshold Sketch", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), + sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ToonFilter()}, - listName:"Toon", - titleName:"Toon", - sliderConfiguration:.disabled, + filter: { ToonFilter() }, + listName: "Toon", + titleName: "Toon", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SmoothToonFilter()}, - listName:"Smooth toon", - titleName:"Smooth Toon", - sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:6.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SmoothToonFilter() }, + listName: "Smooth toon", + titleName: "Smooth Toon", + sliderConfiguration: .enabled(minimumValue:1.0, 
maximumValue:6.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{TiltShift()}, - listName:"Tilt shift", - titleName:"Tilt Shift", - sliderConfiguration:.enabled(minimumValue:0.2, maximumValue:0.8, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { TiltShift() }, + listName: "Tilt shift", + titleName: "Tilt Shift", + sliderConfiguration: .enabled(minimumValue:0.2, maximumValue:0.8, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.topFocusLevel = sliderValue - 0.1 filter.bottomFocusLevel = sliderValue + 0.1 }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{CGAColorspaceFilter()}, - listName:"CGA colorspace", - titleName:"CGA Colorspace", - sliderConfiguration:.disabled, + filter: { CGAColorspaceFilter() }, + listName: "CGA colorspace", + titleName: "CGA Colorspace", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Posterize()}, - listName:"Posterize", - titleName:"Posterize", - sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:20.0, initialValue:10.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Posterize() }, + listName: "Posterize", + titleName: "Posterize", + sliderConfiguration: .enabled(minimumValue:1.0, maximumValue:20.0, initialValue:10.0), + sliderUpdateCallback: {filter, sliderValue in filter.colorLevels = round(sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Convolution3x3()}, - listName:"3x3 convolution", - titleName:"3x3 convolution", - sliderConfiguration:.disabled, + filter: { Convolution3x3() }, + listName: "3x3 convolution", + titleName: "3x3 convolution", + 
sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! Convolution3x3 - castFilter.convolutionKernel = Matrix3x3(rowMajorValues:[ + castFilter.convolutionKernel = Matrix3x3(rowMajorValues: [ -1.0, 0.0, 1.0, -2.0, 0.0, 2.0, -1.0, 0.0, 1.0]) @@ -755,38 +755,38 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{EmbossFilter()}, - listName:"Emboss", - titleName:"Emboss", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { EmbossFilter() }, + listName: "Emboss", + titleName: "Emboss", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Laplacian()}, - listName:"Laplacian", - titleName:"Laplacian", - sliderConfiguration:.disabled, + filter: { Laplacian() }, + listName: "Laplacian", + titleName: "Laplacian", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ChromaKeying()}, - listName:"Chroma key", - titleName:"Chroma Key", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.00, initialValue:0.40), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ChromaKeying() }, + listName: "Chroma key", + titleName: "Chroma Key", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.00, initialValue:0.40), + sliderUpdateCallback: {filter, sliderValue in filter.thresholdSensitivity = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: 
.custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! ChromaKeying let blendFilter = AlphaBlend() blendFilter.mix = 1.0 - let inputImage = try! PictureInput(imageName:blendImageName) + let inputImage = try! PictureInput(imageName: blendImageName) inputImage --> blendFilter camera --> castFilter --> blendFilter --> outputView @@ -795,139 +795,139 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{KuwaharaFilter()}, - listName:"Kuwahara", - titleName:"Kuwahara", - sliderConfiguration:.enabled(minimumValue:3.0, maximumValue:9.0, initialValue:3.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { KuwaharaFilter() }, + listName: "Kuwahara", + titleName: "Kuwahara", + sliderConfiguration: .enabled(minimumValue:3.0, maximumValue:9.0, initialValue:3.0), + sliderUpdateCallback: {filter, sliderValue in filter.radius = Int(round(sliderValue)) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{KuwaharaRadius3Filter()}, - listName:"Kuwahara (radius 3)", - titleName:"Kuwahara (Radius 3)", - sliderConfiguration:.disabled, + filter: { KuwaharaRadius3Filter() }, + listName: "Kuwahara (radius 3)", + titleName: "Kuwahara (Radius 3)", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Vignette()}, - listName:"Vignette", - titleName:"Vignette", - sliderConfiguration:.enabled(minimumValue:0.5, maximumValue:0.9, initialValue:0.75), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { Vignette() }, + listName: "Vignette", + titleName: "Vignette", + sliderConfiguration: .enabled(minimumValue:0.5, maximumValue:0.9, initialValue:0.75), + sliderUpdateCallback: {filter, sliderValue in filter.end = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{GaussianBlur()}, - listName:"Gaussian blur", - 
titleName:"Gaussian Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { GaussianBlur() }, + listName: "Gaussian blur", + titleName: "Gaussian Blur", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BoxBlur()}, - listName:"Box blur", - titleName:"Box Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BoxBlur() }, + listName: "Box blur", + titleName: "Box Blur", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{MedianFilter()}, - listName:"Median", - titleName:"Median", - sliderConfiguration:.disabled, + filter: { MedianFilter() }, + listName: "Median", + titleName: "Median", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{BilateralBlur()}, - listName:"Bilateral blur", - titleName:"Bilateral Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:10.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BilateralBlur() }, + listName: "Bilateral blur", + titleName: "Bilateral Blur", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:10.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.distanceNormalizationFactor = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - 
filter:{MotionBlur()}, - listName:"Motion blur", - titleName:"Motion Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:180.0, initialValue:0.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { MotionBlur() }, + listName: "Motion blur", + titleName: "Motion Blur", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:180.0, initialValue:0.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurAngle = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ZoomBlur()}, - listName:"Zoom blur", - titleName:"Zoom Blur", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.5, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ZoomBlur() }, + listName: "Zoom blur", + titleName: "Zoom Blur", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.5, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.blurSize = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( // TODO: Make this only partially applied to the view - filter:{iOSBlur()}, - listName:"iOS 7 blur", - titleName:"iOS 7 Blur", - sliderConfiguration:.disabled, + filter: { iOSBlur() }, + listName: "iOS 7 blur", + titleName: "iOS 7 Blur", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SwirlDistortion()}, - listName:"Swirl", - titleName:"Swirl", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { SwirlDistortion() }, + listName: "Swirl", + titleName: "Swirl", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderUpdateCallback: {filter, sliderValue in filter.angle = sliderValue }, - filterOperationType:.singleInput + filterOperationType: 
.singleInput ), FilterOperation( - filter:{BulgeDistortion()}, - listName:"Bulge", - titleName:"Bulge", - sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { BulgeDistortion() }, + listName: "Bulge", + titleName: "Bulge", + sliderConfiguration: .enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in // filter.scale = sliderValue filter.center = Position(0.5, sliderValue) }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{PinchDistortion()}, - listName:"Pinch", - titleName:"Pinch", - sliderConfiguration:.enabled(minimumValue:-2.0, maximumValue:2.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { PinchDistortion() }, + listName: "Pinch", + titleName: "Pinch", + sliderConfiguration: .enabled(minimumValue:-2.0, maximumValue:2.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.scale = sliderValue }, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{SphereRefraction()}, - listName:"Sphere refraction", - titleName:"Sphere Refraction", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), - sliderUpdateCallback:{(filter, sliderValue) in + filter: { SphereRefraction() }, + listName: "Sphere refraction", + titleName: "Sphere Refraction", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), + sliderUpdateCallback: {filter, sliderValue in filter.radius = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! 
SphereRefraction // Provide a blurred image for a cool-looking background @@ -944,14 +944,14 @@ let filterOperations: Array = [ }) ), FilterOperation( - filter:{GlassSphereRefraction()}, - listName:"Glass sphere", - titleName:"Glass Sphere", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), - sliderUpdateCallback:{(filter, sliderValue) in + filter: { GlassSphereRefraction() }, + listName: "Glass sphere", + titleName: "Glass Sphere", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), + sliderUpdateCallback: {filter, sliderValue in filter.radius = sliderValue }, - filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + filterOperationType: .custom(filterSetupFunction: {camera, filter, outputView in let castFilter = filter as! GlassSphereRefraction // Provide a blurred image for a cool-looking background @@ -967,245 +967,245 @@ let filterOperations: Array = [ return blendFilter }) ), - FilterOperation ( - filter:{StretchDistortion()}, - listName:"Stretch", - titleName:"Stretch", - sliderConfiguration:.disabled, + FilterOperation( + filter: { StretchDistortion() }, + listName: "Stretch", + titleName: "Stretch", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Dilation()}, - listName:"Dilation", - titleName:"Dilation", - sliderConfiguration:.disabled, + filter: { Dilation() }, + listName: "Dilation", + titleName: "Dilation", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{Erosion()}, - listName:"Erosion", - titleName:"Erosion", - sliderConfiguration:.disabled, + filter: { Erosion() }, + listName: "Erosion", + titleName: "Erosion", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: 
.singleInput ), FilterOperation( - filter:{OpeningFilter()}, - listName:"Opening", - titleName:"Opening", - sliderConfiguration:.disabled, + filter: { OpeningFilter() }, + listName: "Opening", + titleName: "Opening", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), FilterOperation( - filter:{ClosingFilter()}, - listName:"Closing", - titleName:"Closing", - sliderConfiguration:.disabled, + filter: { ClosingFilter() }, + listName: "Closing", + titleName: "Closing", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.singleInput + filterOperationType: .singleInput ), // TODO: Perlin noise // TODO: JFAVoronoi // TODO: Mosaic FilterOperation( - filter:{LocalBinaryPattern()}, - listName:"Local binary pattern", - titleName:"Local Binary Pattern", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { LocalBinaryPattern() }, + listName: "Local binary pattern", + titleName: "Local Binary Pattern", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{ColorLocalBinaryPattern()}, - listName:"Local binary pattern (color)", - titleName:"Local Binary Pattern (Color)", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.singleInput + filter: { ColorLocalBinaryPattern() }, + listName: "Local binary pattern (color)", + titleName: "Local Binary Pattern (Color)", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .singleInput ), FilterOperation( - filter:{DissolveBlend()}, - listName:"Dissolve blend", - titleName:"Dissolve Blend", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { DissolveBlend() }, + listName: "Dissolve blend", + titleName: "Dissolve Blend", + sliderConfiguration: 
.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ChromaKeyBlend()}, - listName:"Chroma key blend (green)", - titleName:"Chroma Key (Green)", - sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.4), - sliderUpdateCallback: {(filter, sliderValue) in + filter: { ChromaKeyBlend() }, + listName: "Chroma key blend (green)", + titleName: "Chroma Key (Green)", + sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.4), + sliderUpdateCallback: {filter, sliderValue in filter.thresholdSensitivity = sliderValue }, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{AddBlend()}, - listName:"Add blend", - titleName:"Add Blend", - sliderConfiguration:.disabled, + filter: { AddBlend() }, + listName: "Add blend", + titleName: "Add Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{DivideBlend()}, - listName:"Divide blend", - titleName:"Divide Blend", - sliderConfiguration:.disabled, + filter: { DivideBlend() }, + listName: "Divide blend", + titleName: "Divide Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{MultiplyBlend()}, - listName:"Multiply blend", - titleName:"Multiply Blend", - sliderConfiguration:.disabled, + filter: { MultiplyBlend() }, + listName: "Multiply blend", + titleName: "Multiply Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{OverlayBlend()}, - listName:"Overlay blend", - titleName:"Overlay Blend", - sliderConfiguration:.disabled, + filter: { OverlayBlend() }, + listName: 
"Overlay blend", + titleName: "Overlay Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{LightenBlend()}, - listName:"Lighten blend", - titleName:"Lighten Blend", - sliderConfiguration:.disabled, + filter: { LightenBlend() }, + listName: "Lighten blend", + titleName: "Lighten Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{DarkenBlend()}, - listName:"Darken blend", - titleName:"Darken Blend", - sliderConfiguration:.disabled, + filter: { DarkenBlend() }, + listName: "Darken blend", + titleName: "Darken Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ColorBurnBlend()}, - listName:"Color burn blend", - titleName:"Color Burn Blend", - sliderConfiguration:.disabled, + filter: { ColorBurnBlend() }, + listName: "Color burn blend", + titleName: "Color Burn Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ColorDodgeBlend()}, - listName:"Color dodge blend", - titleName:"Color Dodge Blend", - sliderConfiguration:.disabled, + filter: { ColorDodgeBlend() }, + listName: "Color dodge blend", + titleName: "Color Dodge Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{LinearBurnBlend()}, - listName:"Linear burn blend", - titleName:"Linear Burn Blend", - sliderConfiguration:.disabled, + filter: { LinearBurnBlend() }, + listName: "Linear burn blend", + titleName: "Linear Burn Blend", + sliderConfiguration: .disabled, sliderUpdateCallback: nil, - filterOperationType:.blend + filterOperationType: .blend ), FilterOperation( - filter:{ScreenBlend()}, - 
listName:"Screen blend", - titleName:"Screen Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { ScreenBlend() }, + listName: "Screen blend", + titleName: "Screen Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{DifferenceBlend()}, - listName:"Difference blend", - titleName:"Difference Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { DifferenceBlend() }, + listName: "Difference blend", + titleName: "Difference Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{SubtractBlend()}, - listName:"Subtract blend", - titleName:"Subtract Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { SubtractBlend() }, + listName: "Subtract blend", + titleName: "Subtract Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{ExclusionBlend()}, - listName:"Exclusion blend", - titleName:"Exclusion Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { ExclusionBlend() }, + listName: "Exclusion blend", + titleName: "Exclusion Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{HardLightBlend()}, - listName:"Hard light blend", - titleName:"Hard Light Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { HardLightBlend() }, + listName: "Hard light blend", + titleName: "Hard Light Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{SoftLightBlend()}, - listName:"Soft light blend", - titleName:"Soft Light Blend", - 
sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { SoftLightBlend() }, + listName: "Soft light blend", + titleName: "Soft Light Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{ColorBlend()}, - listName:"Color blend", - titleName:"Color Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { ColorBlend() }, + listName: "Color blend", + titleName: "Color Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{HueBlend()}, - listName:"Hue blend", - titleName:"Hue Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { HueBlend() }, + listName: "Hue blend", + titleName: "Hue Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{SaturationBlend()}, - listName:"Saturation blend", - titleName:"Saturation Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { SaturationBlend() }, + listName: "Saturation blend", + titleName: "Saturation Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{LuminosityBlend()}, - listName:"Luminosity blend", - titleName:"Luminosity Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend + filter: { LuminosityBlend() }, + listName: "Luminosity blend", + titleName: "Luminosity Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend ), FilterOperation( - filter:{NormalBlend()}, - listName:"Normal blend", - titleName:"Normal Blend", - sliderConfiguration:.disabled, - sliderUpdateCallback:nil, - filterOperationType:.blend - ), + filter: { 
NormalBlend() }, + listName: "Normal blend", + titleName: "Normal Blend", + sliderConfiguration: .disabled, + sliderUpdateCallback: nil, + filterOperationType: .blend + ) // TODO: Poisson blend ] diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift index a65d8091..991911da 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift @@ -5,14 +5,13 @@ import AVFoundation let blendImageName = "Lambeau.jpg" class FilterShowcaseWindowController: NSWindowController { - @IBOutlet var filterView: RenderView! @IBOutlet weak var filterSlider: NSSlider! - dynamic var currentSliderValue:Float = 0.5 { + dynamic var currentSliderValue: Float = 0.5 { willSet(newSliderValue) { - switch (currentFilterOperation!.sliderConfiguration) { + switch currentFilterOperation!.sliderConfiguration { case .enabled: currentFilterOperation!.updateBasedOnSliderValue(newSliderValue) case .disabled: break } @@ -20,9 +19,9 @@ class FilterShowcaseWindowController: NSWindowController { } var currentFilterOperation: FilterOperationInterface? - var videoCamera:Camera! - lazy var blendImage:PictureInput = { - return PictureInput(imageName:blendImageName) + var videoCamera: Camera! 
+ lazy var blendImage: PictureInput = { + return PictureInput(imageName: blendImageName) }() var currentlySelectedRow = 1 @@ -30,7 +29,7 @@ class FilterShowcaseWindowController: NSWindowController { super.windowDidLoad() do { - videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset1280x720) + videoCamera = try Camera(sessionPreset: AVCaptureSessionPreset1280x720) videoCamera.runBenchmark = true videoCamera.startCapture() } catch { @@ -39,8 +38,8 @@ class FilterShowcaseWindowController: NSWindowController { self.changeSelectedRow(0) } - func changeSelectedRow(_ row:Int) { - guard (currentlySelectedRow != row) else { return } + func changeSelectedRow(_ row: Int) { + guard currentlySelectedRow != row else { return } currentlySelectedRow = row // Clean up everything from the previous filter selection first @@ -81,12 +80,12 @@ class FilterShowcaseWindowController: NSWindowController { // MARK: - // MARK: Table view delegate and datasource methods - func numberOfRowsInTableView(_ aTableView:NSTableView!) -> Int { + func numberOfRowsInTableView(_ aTableView: NSTableView!) -> Int { return filterOperations.count } - func tableView(_ aTableView:NSTableView!, objectValueForTableColumn aTableColumn:NSTableColumn!, row rowIndex:Int) -> AnyObject! { - let filterInList:FilterOperationInterface = filterOperations[rowIndex] + func tableView(_ aTableView: NSTableView!, objectValueForTableColumn aTableColumn: NSTableColumn!, row rowIndex: Int) -> AnyObject! 
{ + let filterInList: FilterOperationInterface = filterOperations[rowIndex] return filterInList.listName as NSString } diff --git a/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift index 3586c02f..6b36170e 100755 --- a/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift +++ b/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift @@ -3,12 +3,11 @@ import GPUImage @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { - @IBOutlet weak var window: NSWindow! @IBOutlet weak var renderView: RenderView! - var image:PictureInput! - var filter:SaturationAdjustment! + var image: PictureInput! + var filter: SaturationAdjustment! dynamic var filterValue = 1.0 { didSet { @@ -18,8 +17,8 @@ class AppDelegate: NSObject, NSApplicationDelegate { } func applicationDidFinishLaunching(_ aNotification: Notification) { - let inputImage = NSImage(named:"Lambeau.jpg")! - image = PictureInput(image:inputImage) + let inputImage = NSImage(named: "Lambeau.jpg")! + image = PictureInput(image: inputImage) filter = SaturationAdjustment() @@ -27,4 +26,3 @@ class AppDelegate: NSObject, NSApplicationDelegate { image.processImage() } } - diff --git a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift index 188cc4f1..99bd646c 100644 --- a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift +++ b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift @@ -3,12 +3,11 @@ import GPUImage @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { - @IBOutlet weak var window: NSWindow! @IBOutlet weak var renderView: RenderView! - var movie:MovieInput! - var filter:Pixellate! + var movie: MovieInput! + var filter: Pixellate! 
dynamic var filterValue = 0.05 { didSet { @@ -18,10 +17,10 @@ class AppDelegate: NSObject, NSApplicationDelegate { func applicationDidFinishLaunching(_ aNotification: Notification) { let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + let movieURL = URL(string: "sample_iPod.m4v", relativeTo: bundleURL)! do { - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + movie = try MovieInput(url: movieURL, playAtActualSpeed: true) filter = Pixellate() movie --> filter --> renderView movie.runBenchmark = true @@ -31,4 +30,3 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } } - diff --git a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift index 875309df..c87e16d2 100755 --- a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift +++ b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift @@ -7,10 +7,10 @@ class AppDelegate: NSObject, NSApplicationDelegate { @IBOutlet weak var window: NSWindow! @IBOutlet weak var renderView: RenderView! - var camera:Camera! - var filter:Pixellate! + var camera: Camera! + var filter: Pixellate! 
- dynamic var filterSetting:Float = 0.01 { + dynamic var filterSetting: Float = 0.01 { didSet { filter.fractionalWidthOfAPixel = filterSetting } @@ -22,13 +22,13 @@ class AppDelegate: NSObject, NSApplicationDelegate { let okayButton = imageSavingDialog.runModal() if okayButton == NSModalResponseOK { - filter.saveNextFrameToURL(imageSavingDialog.url!, format:.png) + filter.saveNextFrameToURL(imageSavingDialog.url!, format: .png) } } func applicationDidFinishLaunching(_ aNotification: Notification) { do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480) filter = Pixellate() camera --> filter --> renderView diff --git a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift index cbf2f6c7..18fad03a 100644 --- a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift +++ b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift @@ -4,18 +4,17 @@ import AVFoundation @NSApplicationMain class AppDelegate: NSObject, NSApplicationDelegate { + @IBOutlet weak var window: NSWindow! + @IBOutlet var renderView: RenderView! - @IBOutlet weak var window:NSWindow! - @IBOutlet var renderView:RenderView! - - var camera:Camera! - var filter:SmoothToonFilter! - var movieOutput:MovieOutput? + var camera: Camera! + var filter: SmoothToonFilter! + var movieOutput: MovieOutput? 
var isRecording = false func applicationDidFinishLaunching(_ aNotification: Notification) { do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480) filter = SmoothToonFilter() camera --> filter --> renderView @@ -31,7 +30,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } @IBAction func record(_ sender: AnyObject) { - if (!isRecording) { + if !isRecording { let movieSavingDialog = NSSavePanel() movieSavingDialog.allowedFileTypes = ["mp4"] let okayButton = movieSavingDialog.runModal() @@ -40,7 +39,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { do { self.isRecording = true // movieOutput = try MovieOutput(URL:movieSavingDialog.url!, size:Size(width:1280, height:720), liveVideo:true) - movieOutput = try MovieOutput(URL:movieSavingDialog.url!, size:Size(width:640, height:480), liveVideo:true) + movieOutput = try MovieOutput(URL: movieSavingDialog.url!, size: Size(width: 640, height: 480), liveVideo: true) // camera.audioEncodingTarget = movieOutput filter --> movieOutput! movieOutput!.startRecording() @@ -50,7 +49,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } } else { - movieOutput?.finishRecording{ + movieOutput?.finishRecording { self.isRecording = false DispatchQueue.main.async { (sender as! NSButton).title = "Record" @@ -62,4 +61,3 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } - diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift index 86437726..0b2eeea4 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift @@ -3,11 +3,9 @@ import GPUImage @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? 
func applicationDidFinishLaunching(_ application: UIApplication) { _needCheckFilterContainerThread = false } } - diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift index 81727d19..2798111a 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -5,17 +5,15 @@ import AVFoundation let blendImageName = "WID-small.jpg" class FilterDisplayViewController: UIViewController, UISplitViewControllerDelegate { - @IBOutlet var filterSlider: UISlider? @IBOutlet var filterView: RenderView? - let videoCamera:Camera? - var blendImage:PictureInput? + let videoCamera: Camera? + var blendImage: PictureInput? - required init(coder aDecoder: NSCoder) - { + required init(coder aDecoder: NSCoder) { do { - videoCamera = try Camera(sessionPreset:AVCaptureSession.Preset.vga640x480, location:.backFacing) + videoCamera = try Camera(sessionPreset: AVCaptureSession.Preset.vga640x480, location: .backFacing) videoCamera!.runBenchmark = true } catch { videoCamera = nil @@ -45,7 +43,7 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega currentFilterConfiguration.filter.addTarget(view) case .blend: videoCamera.addTarget(currentFilterConfiguration.filter) - self.blendImage = try? PictureInput(imageName:blendImageName) + self.blendImage = try? 
PictureInput(imageName: blendImageName) self.blendImage?.addTarget(currentFilterConfiguration.filter) self.blendImage?.processImage() currentFilterConfiguration.filter.addTarget(view) @@ -76,8 +74,8 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega @IBAction func updateSliderValue() { if let currentFilterConfiguration = self.filterOperation { - switch (currentFilterConfiguration.sliderConfiguration) { - case .enabled(_, _, _): currentFilterConfiguration.updateBasedOnSliderValue(Float(self.filterSlider!.value)) + switch currentFilterConfiguration.sliderConfiguration { + case .enabled: currentFilterConfiguration.updateBasedOnSliderValue(Float(self.filterSlider!.value)) case .disabled: break } } @@ -104,4 +102,3 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega } } - diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift index 430af00b..a596376e 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift @@ -1,13 +1,12 @@ import UIKit class FilterListViewController: UITableViewController { - - var filterDisplayViewController: FilterDisplayViewController? = nil + var filterDisplayViewController: FilterDisplayViewController? var objects = NSMutableArray() // #pragma mark - Segues - override func prepare(for segue: UIStoryboardSegue, sender: Any?){ + override func prepare(for segue: UIStoryboardSegue, sender: Any?) 
{ if segue.identifier == "showDetail" { if let indexPath = self.tableView.indexPathForSelectedRow { let filterInList = filterOperations[(indexPath as NSIndexPath).row] @@ -30,9 +29,8 @@ class FilterListViewController: UITableViewController { override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath) - let filterInList:FilterOperationInterface = filterOperations[(indexPath as NSIndexPath).row] + let filterInList: FilterOperationInterface = filterOperations[(indexPath as NSIndexPath).row] cell.textLabel?.text = filterInList.listName return cell } } - diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift index 7d21e236..44ffc100 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift @@ -2,11 +2,9 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { return true } } - diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift index a2c52890..2a729e7a 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift @@ -2,20 +2,19 @@ import UIKit import GPUImage class ViewController: UIViewController { - @IBOutlet weak var renderView: RenderView! - var picture:PictureInput! - var filter:SaturationAdjustment! + var picture: PictureInput! + var filter: SaturationAdjustment! 
override func viewDidLayoutSubviews() { super.viewDidLayoutSubviews() // Filtering image for saving - let testImage = UIImage(named:"WID-small.jpg")! + let testImage = UIImage(named: "WID-small.jpg")! let toonFilter = SmoothToonFilter() - let filteredImage:UIImage + let filteredImage: UIImage do { filteredImage = try testImage.filterWithOperation(toonFilter) } catch { @@ -25,17 +24,16 @@ class ViewController: UIViewController { let pngImage = UIImagePNGRepresentation(filteredImage)! do { - let documentsDir = try FileManager.default.url(for:.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) - let fileURL = URL(string:"test.png", relativeTo:documentsDir)! - try pngImage.write(to:fileURL, options:.atomic) + let documentsDir = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + let fileURL = URL(string: "test.png", relativeTo: documentsDir)! + try pngImage.write(to: fileURL, options: .atomic) } catch { print("Couldn't write to file with error: \(error)") } - // Filtering image for display do { - picture = try PictureInput(image:UIImage(named:"WID-small.jpg")!) + picture = try PictureInput(image: UIImage(named: "WID-small.jpg")!) } catch { print("Couldn't create PictureInput with error: \(error)") return @@ -45,4 +43,3 @@ class ViewController: UIViewController { picture.processImage() } } - diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift index 14a4337e..9cd476cd 100644 --- a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift @@ -10,10 +10,8 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) 
-> Bool { // Override point for customization after application launch. return true @@ -41,6 +39,4 @@ class AppDelegate: UIResponder, UIApplicationDelegate { // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. } - } - diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift index ce027377..0e159b17 100755 --- a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -12,12 +12,11 @@ import CoreAudio import AVFoundation class ViewController: UIViewController { - - @IBOutlet var progressView:UIProgressView! + @IBOutlet var progressView: UIProgressView! - var movieInput:MovieInput! - var movieOutput:MovieOutput! - var filter:MissEtikateFilter! + var movieInput: MovieInput! + var movieOutput: MovieOutput! + var filter: MissEtikateFilter! override func viewDidLoad() { super.viewDidLoad() @@ -25,77 +24,74 @@ class ViewController: UIViewController { let bundleURL = Bundle.main.resourceURL! // The movie you want to reencode - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + let movieURL = URL(string: "sample_iPod.m4v", relativeTo: bundleURL)! - let documentsDir = FileManager().urls(for:.documentDirectory, in:.userDomainMask).first! + let documentsDir = FileManager().urls(for: .documentDirectory, in: .userDomainMask).first! // The location you want to save the new video - let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! + let exportedURL = URL(string: "test.mp4", relativeTo: documentsDir)! 
- let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] - let asset = AVURLAsset(url:movieURL, options:inputOptions) + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let asset = AVURLAsset(url: movieURL, options: inputOptions) - guard let videoTrack = asset.tracks(withMediaType:AVMediaType.video).first else { return } - let audioTrack = asset.tracks(withMediaType:AVMediaType.audio).first + guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return } + let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first - let audioDecodingSettings:[String:Any]? - let audioEncodingSettings:[String:Any]? - var audioSourceFormatHint:CMFormatDescription? = nil + let audioDecodingSettings: [String: Any]? + let audioEncodingSettings: [String: Any]? + var audioSourceFormatHint: CMFormatDescription? let shouldPassthroughAudio = false - if(shouldPassthroughAudio) { + if shouldPassthroughAudio { audioDecodingSettings = nil audioEncodingSettings = nil // A format hint is required when writing to certain file types with passthrough audio // A conditional downcast would not work here for some reason if let description = audioTrack?.formatDescriptions.first { audioSourceFormatHint = (description as! 
CMFormatDescription) } - } - else { - audioDecodingSettings = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples + } else { + audioDecodingSettings = [AVFormatIDKey: kAudioFormatLinearPCM] // Noncompressed audio samples var acl = AudioChannelLayout() memset(&acl, 0, MemoryLayout.size) acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo audioEncodingSettings = [ - AVFormatIDKey:kAudioFormatMPEG4AAC, - AVNumberOfChannelsKey:2, - AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, - AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), - AVEncoderBitRateKey:96000 + AVFormatIDKey: kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey: 2, + AVSampleRateKey: AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey: NSData(bytes: &acl, length: MemoryLayout.size), + AVEncoderBitRateKey: 96000 ] audioSourceFormatHint = nil } do { - movieInput = try MovieInput(asset:asset, videoComposition:nil, playAtActualSpeed:false, loop:false, audioSettings:audioDecodingSettings) - } - catch { + movieInput = try MovieInput(asset: asset, videoComposition: nil, playAtActualSpeed: false, loop: false, audioSettings: audioDecodingSettings) + } catch { print("ERROR: Unable to setup MovieInput with error: \(error)") return } try? 
FileManager().removeItem(at: exportedURL) - let videoEncodingSettings:[String:Any] = [ + let videoEncodingSettings: [String: Any] = [ AVVideoCompressionPropertiesKey: [ - AVVideoExpectedSourceFrameRateKey:videoTrack.nominalFrameRate, - AVVideoAverageBitRateKey:videoTrack.estimatedDataRate, - AVVideoProfileLevelKey:AVVideoProfileLevelH264HighAutoLevel, - AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC, - AVVideoAllowFrameReorderingKey:videoTrack.requiresFrameReordering], - AVVideoCodecKey:AVVideoCodecH264] + AVVideoExpectedSourceFrameRateKey: videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey: videoTrack.estimatedDataRate, + AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey: videoTrack.requiresFrameReordering], + AVVideoCodecKey: AVVideoCodecH264] do { - movieOutput = try MovieOutput(URL: exportedURL, size:Size(width:Float(videoTrack.naturalSize.width), height:Float(videoTrack.naturalSize.height)), fileType:AVFileType.mp4.rawValue, liveVideo:false, videoSettings:videoEncodingSettings, videoNaturalTimeScale:videoTrack.naturalTimeScale, audioSettings:audioEncodingSettings, audioSourceFormatHint:audioSourceFormatHint) - } - catch { + movieOutput = try MovieOutput(URL: exportedURL, size: Size(width: Float(videoTrack.naturalSize.width), height: Float(videoTrack.naturalSize.height)), fileType: AVFileType.mp4.rawValue, liveVideo: false, videoSettings: videoEncodingSettings, videoNaturalTimeScale: videoTrack.naturalTimeScale, audioSettings: audioEncodingSettings, audioSourceFormatHint: audioSourceFormatHint) + } catch { print("ERROR: Unable to setup MovieOutput with error: \(error)") return } filter = MissEtikateFilter() - if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } + if audioTrack != nil { movieInput.audioEncodingTarget = movieOutput } movieInput.synchronizedMovieOutput = movieOutput - //movieInput.synchronizedEncodingDebug = true + 
// movieInput.synchronizedEncodingDebug = true movieInput --> filter --> movieOutput movieInput.completion = { @@ -114,8 +110,8 @@ class ViewController: UIViewController { } } - movieOutput.startRecording() { started, error in - if(!started) { + movieOutput.startRecording { started, error in + if !started { print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") return } @@ -129,6 +125,4 @@ class ViewController: UIViewController { // Dispose of any resources that can be recreated. } - } - diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift index e6bc194a..7b25de29 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift @@ -10,10 +10,8 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { // Override point for customization after application launch. return true @@ -41,6 +39,4 @@ class AppDelegate: UIResponder, UIApplicationDelegate { // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. } - } - diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift index ff9431f2..ee04dcb9 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift @@ -4,23 +4,22 @@ import CoreAudio import AVFoundation class ViewController: UIViewController { - @IBOutlet weak var renderView: RenderView! - var movie:MovieInput! - var filter:Pixellate! - var speaker:SpeakerOutput! + var movie: MovieInput! + var filter: Pixellate! 
+ var speaker: SpeakerOutput! override func viewDidLoad() { super.viewDidLoad() let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + let movieURL = URL(string: "sample_iPod.m4v", relativeTo: bundleURL)! do { - let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM] + let audioDecodeSettings = [AVFormatIDKey: kAudioFormatLinearPCM] - movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings) + movie = try MovieInput(url: movieURL, playAtActualSpeed: true, loop: true, audioSettings: audioDecodeSettings) speaker = SpeakerOutput() movie.audioEncodingTarget = speaker @@ -54,4 +53,3 @@ class ViewController: UIViewController { speaker.start() } } - diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift index e2bb4729..c62fcaf2 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift @@ -2,13 +2,9 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? - func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { - - return true } @@ -31,6 +27,4 @@ class AppDelegate: UIResponder, UIApplicationDelegate { // Pause camera if not already } - } - diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift index ae28dce9..64c90327 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift @@ -17,13 +17,13 @@ class ViewController: UIViewController { }() let saturationFilter = SaturationAdjustment() let blendFilter = AlphaBlend() - var camera:Camera! 
+ var camera: Camera! override func viewDidLoad() { super.viewDidLoad() do { - camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) + camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480) camera.runBenchmark = true camera.delegate = self camera --> saturationFilter --> blendFilter --> renderView @@ -46,8 +46,8 @@ class ViewController: UIViewController { @IBAction func capture(_ sender: AnyObject) { print("Capture") do { - let documentsDir = try FileManager.default.url(for:.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) - saturationFilter.saveNextFrameToURL(URL(string:"TestImage.png", relativeTo:documentsDir)!, format:.png) + let documentsDir = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + saturationFilter.saveNextFrameToURL(URL(string: "TestImage.png", relativeTo: documentsDir)!, format: .png) } catch { print("Couldn't save image: \(error)") } @@ -78,7 +78,7 @@ extension ViewController: CameraDelegate { let flip = CGAffineTransform(scaleX: 1, y: -1) let rotate = flip.rotated(by: CGFloat(-M_PI_2)) let translate = rotate.translatedBy(x: -1, y: -1) - let xform = translate.scaledBy(x: CGFloat(2/fbSize.width), y: CGFloat(2/fbSize.height)) + let xform = translate.scaledBy(x: CGFloat(2 / fbSize.width), y: CGFloat(2 / fbSize.height)) let glRect = bounds.applying(xform) let x = Float(glRect.origin.x) diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift index 9da71688..1fabf495 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift @@ -2,12 +2,9 @@ import UIKit @UIApplicationMain class AppDelegate: UIResponder, UIApplicationDelegate { - var window: UIWindow? 
- func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { return true } } - diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 5b3c3c69..14f28f50 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -4,16 +4,16 @@ import AVFoundation class ViewController: UIViewController { @IBOutlet weak var renderView: RenderView! - var camera:Camera! - var filter:SaturationAdjustment! + var camera: Camera! + var filter: SaturationAdjustment! var isRecording = false - var movieOutput:MovieOutput? = nil + var movieOutput: MovieOutput? override func viewDidLoad() { super.viewDidLoad() do { - camera = try Camera(sessionPreset:AVCaptureSession.Preset.vga640x480) + camera = try Camera(sessionPreset: AVCaptureSession.Preset.vga640x480) camera.runBenchmark = true filter = SaturationAdjustment() camera --> filter --> renderView @@ -28,13 +28,13 @@ class ViewController: UIViewController { } @IBAction func capture(_ sender: AnyObject) { - if (!isRecording) { + if !isRecording { do { self.isRecording = true - let documentsDir = try FileManager.default.url(for:.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) - let fileURL = URL(string:"test.mp4", relativeTo:documentsDir)! + let documentsDir = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + let fileURL = URL(string: "test.mp4", relativeTo: documentsDir)! 
do { - try FileManager.default.removeItem(at:fileURL) + try FileManager.default.removeItem(at: fileURL) } catch { } @@ -45,19 +45,19 @@ class ViewController: UIViewController { fatalError("ERROR: Could not connect audio target with error: \(error)") } - let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(writingTo:AVFileType.mp4) as? [String : Any] - var videoSettings:[String : Any]? = nil + let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(writingTo: AVFileType.mp4) as? [String: Any] + var videoSettings: [String: Any]? if #available(iOS 11.0, *) { - videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType:.h264, assetWriterOutputFileType:AVFileType.mp4) as? [String : Any] + videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType: .h264, assetWriterOutputFileType: AVFileType.mp4) as? [String: Any] videoSettings![AVVideoWidthKey] = nil videoSettings![AVVideoHeightKey] = nil } - movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), fileType:AVFileType.mp4, liveVideo:true, videoSettings:videoSettings, audioSettings:audioSettings) + movieOutput = try MovieOutput(URL: fileURL, size: Size(width: 480, height: 640), fileType: AVFileType.mp4, liveVideo: true, videoSettings: videoSettings, audioSettings: audioSettings) camera.audioEncodingTarget = movieOutput filter --> movieOutput! - movieOutput!.startRecording() { started, error in - if(!started) { + movieOutput!.startRecording { started, error in + if !started { self.isRecording = false fatalError("ERROR: Could not start writing with error: \(String(describing: error))") } @@ -70,7 +70,7 @@ class ViewController: UIViewController { fatalError("Couldn't initialize movie, error: \(error)") } } else { - movieOutput?.finishRecording{ + movieOutput?.finishRecording { self.isRecording = false DispatchQueue.main.async { (sender as! 
UIButton).titleLabel!.text = "Record" diff --git a/framework/Package.swift b/framework/Package.swift index 66277ad0..6d6a5b81 100644 --- a/framework/Package.swift +++ b/framework/Package.swift @@ -16,22 +16,21 @@ let excludes = ["Linux", "Mac"] #elseif os(Linux) -let excludes = ["iOS", "Mac"] +let excludes = ["iOS", "Mac"] #endif - #if os(Linux) || os(macOS) || os(Linux) let package = Package( name: "GPUImage", providers: [ - .Apt("libv4l-dev"), - ], + .Apt("libv4l-dev") + ], targets: [ Target(name: "GPUImage") - ], - dependencies:[], + ], + dependencies: [], exclude: excludes ) @@ -39,11 +38,10 @@ let package = Package( package.dependencies.append([ .Package(url: "./Packages/CVideo4Linux", majorVersion: 1), .Package(url: "./Packages/COpenGL", majorVersion: 1), - .Package(url: "./Packages/CFreeGLUT", majorVersion: 1), - ]) + .Package(url: "./Packages/CFreeGLUT", majorVersion: 1) + ]) #endif - #else fatalError("Unsupported OS") diff --git a/framework/Packages/CFreeGLUT/Package.swift b/framework/Packages/CFreeGLUT/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/CFreeGLUT/Package.swift +++ b/framework/Packages/CFreeGLUT/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/COpenGL/Package.swift b/framework/Packages/COpenGL/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/COpenGL/Package.swift +++ b/framework/Packages/COpenGL/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/COpenGLES/Package.swift b/framework/Packages/COpenGLES/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/COpenGLES/Package.swift +++ b/framework/Packages/COpenGLES/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/CVideo4Linux/Package.swift b/framework/Packages/CVideo4Linux/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/CVideo4Linux/Package.swift +++ b/framework/Packages/CVideo4Linux/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Packages/CVideoCore/Package.swift 
b/framework/Packages/CVideoCore/Package.swift index e69de29b..8b137891 100755 --- a/framework/Packages/CVideoCore/Package.swift +++ b/framework/Packages/CVideoCore/Package.swift @@ -0,0 +1 @@ + diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 384bb5f7..f39e83cb 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -1,6 +1,6 @@ import Foundation -public func defaultVertexShaderForInputs(_ inputCount:UInt) -> String { +public func defaultVertexShaderForInputs(_ inputCount: UInt) -> String { switch inputCount { case 1: return OneInputVertexShader case 2: return TwoInputVertexShader @@ -12,12 +12,12 @@ public func defaultVertexShaderForInputs(_ inputCount:UInt) -> String { } open class BasicOperation: ImageProcessingOperation { - public let maximumInputs:UInt - public var overriddenOutputSize:Size? - public var overriddenOutputRotation:Rotation? + public let maximumInputs: UInt + public var overriddenOutputSize: Size? + public var overriddenOutputRotation: Rotation? public var backgroundColor = Color.black - public var drawUnmodifiedImageOutsideOfMask:Bool = true - public var mask:ImageSource? { + public var drawUnmodifiedImageOutsideOfMask: Bool = true + public var mask: ImageSource? { didSet { if let mask = mask { maskImageRelay.newImageCallback = {[weak self] framebuffer in @@ -33,7 +33,7 @@ open class BasicOperation: ImageProcessingOperation { } } } - public var activatePassthroughOnNextFrame:Bool = false + public var activatePassthroughOnNextFrame: Bool = false public var uniformSettings = ShaderUniformSettings() // MARK: - @@ -41,36 +41,36 @@ open class BasicOperation: ImageProcessingOperation { public let targets = TargetContainer() public let sources = SourceContainer() - var shader:ShaderProgram - public var inputFramebuffers = [UInt:Framebuffer]() - var renderFramebuffer:Framebuffer! 
- var outputFramebuffer:Framebuffer { get { return renderFramebuffer } } - let usesAspectRatio:Bool + var shader: ShaderProgram + public var inputFramebuffers = [UInt: Framebuffer]() + var renderFramebuffer: Framebuffer! + var outputFramebuffer: Framebuffer { get { return renderFramebuffer } } + let usesAspectRatio: Bool let maskImageRelay = ImageRelay() - var maskFramebuffer:Framebuffer? + var maskFramebuffer: Framebuffer? // MARK: - // MARK: Initialization and teardown - public init(shader:ShaderProgram, numberOfInputs:UInt = 1) { + public init(shader: ShaderProgram, numberOfInputs: UInt = 1) { self.maximumInputs = numberOfInputs self.shader = shader usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - public init(vertexShader:String? = nil, fragmentShader:String, numberOfInputs:UInt = 1, operationName:String = #file) { - let compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(vertexShader ?? defaultVertexShaderForInputs(numberOfInputs), fragmentShader:fragmentShader)} + public init(vertexShader: String? = nil, fragmentShader: String, numberOfInputs: UInt = 1, operationName: String = #file) { + let compiledShader = crashOnShaderCompileFailure(operationName) { try sharedImageProcessingContext.programForVertexShader(vertexShader ?? defaultVertexShaderForInputs(numberOfInputs), fragmentShader: fragmentShader) } self.maximumInputs = numberOfInputs self.shader = compiledShader usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - public init(vertexShaderFile:URL? = nil, fragmentShaderFile:URL, numberOfInputs:UInt = 1, operationName:String = #file) throws { - let compiledShader:ShaderProgram + public init(vertexShaderFile: URL? 
= nil, fragmentShaderFile: URL, numberOfInputs: UInt = 1, operationName: String = #file) throws { + let compiledShader: ShaderProgram if let vertexShaderFile = vertexShaderFile { - compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(vertexShaderFile, fragmentShader:fragmentShaderFile)} + compiledShader = crashOnShaderCompileFailure(operationName) { try sharedImageProcessingContext.programForVertexShader(vertexShaderFile, fragmentShader: fragmentShaderFile) } } else { - compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(numberOfInputs), fragmentShader:fragmentShaderFile)} + compiledShader = crashOnShaderCompileFailure(operationName) { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(numberOfInputs), fragmentShader: fragmentShaderFile) } } self.maximumInputs = numberOfInputs self.shader = compiledShader @@ -84,19 +84,19 @@ open class BasicOperation: ImageProcessingOperation { // MARK: - // MARK: Rendering - open func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + open func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if let previousFramebuffer = inputFramebuffers[fromSourceIndex] { previousFramebuffer.unlock() } inputFramebuffers[fromSourceIndex] = framebuffer - guard (!activatePassthroughOnNextFrame) else { // Use this to allow a bootstrap of cyclical processing, like with a low pass filter + guard !activatePassthroughOnNextFrame else { // Use this to allow a bootstrap of cyclical processing, like with a low pass filter activatePassthroughOnNextFrame = false updateTargetsWithFramebuffer(framebuffer) return } - if (UInt(inputFramebuffers.count) >= maximumInputs) { + if UInt(inputFramebuffers.count) >= maximumInputs { renderFrame() updateTargetsWithFramebuffer(outputFramebuffer) @@ -104,7 +104,7 @@ open class 
BasicOperation: ImageProcessingOperation { } open func renderFrame() { - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil: mask != nil) let textureProperties = initialTextureProperties() configureFramebufferSpecificUniforms(inputFramebuffers[0]!) @@ -113,29 +113,28 @@ open class BasicOperation: ImageProcessingOperation { clearFramebufferWithColor(backgroundColor) if let maskFramebuffer = maskFramebuffer { if drawUnmodifiedImageOutsideOfMask { - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: nil, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) } renderStencilMaskFromFramebuffer(maskFramebuffer) - internalRenderFunction(inputFramebuffers[0]!, textureProperties:textureProperties) + internalRenderFunction(inputFramebuffers[0]!, textureProperties: textureProperties) disableStencil() } else { - internalRenderFunction(inputFramebuffers[0]!, textureProperties:textureProperties) + internalRenderFunction(inputFramebuffers[0]!, textureProperties: textureProperties) } } - func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { + 
renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) releaseIncomingFramebuffers() } func releaseIncomingFramebuffers() { - var remainingFramebuffers = [UInt:Framebuffer]() + var remainingFramebuffers = [UInt: Framebuffer]() // If all inputs are still images, have this output behave as one renderFramebuffer.timingStyle = .stillImage - var latestTimestamp:Timestamp? + var latestTimestamp: Timestamp? for (key, framebuffer) in inputFramebuffers { - // When there are multiple transient input sources, use the latest timestamp as the value to pass along if let timestamp = framebuffer.timingStyle.timestamp { if !(timestamp < (latestTimestamp ?? timestamp)) { @@ -156,7 +155,7 @@ open class BasicOperation: ImageProcessingOperation { inputFramebuffers = remainingFramebuffers } - func sizeOfInitialStageBasedOnFramebuffer(_ inputFramebuffer:Framebuffer) -> GLSize { + func sizeOfInitialStageBasedOnFramebuffer(_ inputFramebuffer: Framebuffer) -> GLSize { if let outputSize = overriddenOutputSize { return GLSize(outputSize) } else { @@ -180,18 +179,17 @@ open class BasicOperation: ImageProcessingOperation { return inputTextureProperties } - open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { if usesAspectRatio { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) uniformSettings["aspectRatio"] = inputFramebuffer.aspectRatioForRotation(outputRotation) } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - //guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + // guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } - //renderFramebuffer.lock() - //target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) + // renderFramebuffer.lock() + // target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) } } - diff --git a/framework/Source/CameraConversion.swift b/framework/Source/CameraConversion.swift index 17679979..d3f5390e 100644 --- a/framework/Source/CameraConversion.swift +++ b/framework/Source/CameraConversion.swift @@ -1,36 +1,36 @@ // Note: the original name of YUVToRGBConversion.swift for this file chokes the compiler on Linux for some reason // BT.601, which is the standard for SDTV. -public let colorConversionMatrix601Default = Matrix3x3(rowMajorValues:[ - 1.164, 1.164, 1.164, +public let colorConversionMatrix601Default = Matrix3x3(rowMajorValues: [ + 1.164, 1.164, 1.164, 0.0, -0.392, 2.017, - 1.596, -0.813, 0.0 + 1.596, -0.813, 0.0 ]) // BT.601 full range (ref: http://www.equasys.de/colorconversion.html) -public let colorConversionMatrix601FullRangeDefault = Matrix3x3(rowMajorValues:[ - 1.0, 1.0, 1.0, - 0.0, -0.343, 1.765, - 1.4, -0.711, 0.0, +public let colorConversionMatrix601FullRangeDefault = Matrix3x3(rowMajorValues: [ + 1.0, 1.0, 1.0, + 0.0, -0.343, 1.765, + 1.4, -0.711, 0.0 ]) // BT.709, which is the standard for HDTV. 
-public let colorConversionMatrix709Default = Matrix3x3(rowMajorValues:[ - 1.164, 1.164, 1.164, +public let colorConversionMatrix709Default = Matrix3x3(rowMajorValues: [ + 1.164, 1.164, 1.164, 0.0, -0.213, 2.112, - 1.793, -0.533, 0.0, + 1.793, -0.533, 0.0 ]) -public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuffer, chrominanceFramebuffer:Framebuffer, secondChrominanceFramebuffer:Framebuffer? = nil, resizeOutput: ResizeOutputInfo? = nil, resultFramebuffer:Framebuffer, colorConversionMatrix:Matrix3x3) { - let textureProperties:[InputTextureProperties] +public func convertYUVToRGB(shader: ShaderProgram, luminanceFramebuffer: Framebuffer, chrominanceFramebuffer: Framebuffer, secondChrominanceFramebuffer: Framebuffer? = nil, resizeOutput: ResizeOutputInfo? = nil, resultFramebuffer: Framebuffer, colorConversionMatrix: Matrix3x3) { + let textureProperties: [InputTextureProperties] let luminanceTextureProperties: InputTextureProperties let chrominanceTextureProperties: InputTextureProperties var secondChrominanceTextureProperties: InputTextureProperties? 
if let resizeOutput = resizeOutput { - luminanceTextureProperties = InputTextureProperties(textureCoordinates:luminanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin:resizeOutput.normalizedOffsetFromOrigin, cropSize:resizeOutput.normalizedCropSize), texture:luminanceFramebuffer.texture) - chrominanceTextureProperties = InputTextureProperties(textureCoordinates:chrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin:resizeOutput.normalizedOffsetFromOrigin, cropSize:resizeOutput.normalizedCropSize), texture:chrominanceFramebuffer.texture) + luminanceTextureProperties = InputTextureProperties(textureCoordinates: luminanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin: resizeOutput.normalizedOffsetFromOrigin, cropSize: resizeOutput.normalizedCropSize), texture: luminanceFramebuffer.texture) + chrominanceTextureProperties = InputTextureProperties(textureCoordinates: chrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin: resizeOutput.normalizedOffsetFromOrigin, cropSize: resizeOutput.normalizedCropSize), texture: chrominanceFramebuffer.texture) if let secondChrominanceFramebuffer = secondChrominanceFramebuffer { - secondChrominanceTextureProperties = InputTextureProperties(textureCoordinates:secondChrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin:resizeOutput.normalizedOffsetFromOrigin, cropSize:resizeOutput.normalizedCropSize), texture:secondChrominanceFramebuffer.texture) + secondChrominanceTextureProperties = InputTextureProperties(textureCoordinates: 
secondChrominanceFramebuffer.orientation.rotationNeededForOrientation(resultFramebuffer.orientation).croppedTextureCoordinates(offsetFromOrigin: resizeOutput.normalizedOffsetFromOrigin, cropSize: resizeOutput.normalizedCropSize), texture: secondChrominanceFramebuffer.texture) } } else { luminanceTextureProperties = luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation) @@ -49,7 +49,7 @@ public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuff clearFramebufferWithColor(Color.black) var uniformSettings = ShaderUniformSettings() uniformSettings["colorConversionMatrix"] = colorConversionMatrix - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) luminanceFramebuffer.unlock() chrominanceFramebuffer.unlock() secondChrominanceFramebuffer?.unlock() diff --git a/framework/Source/Color.swift b/framework/Source/Color.swift index 370e4f00..efb099c1 100644 --- a/framework/Source/Color.swift +++ b/framework/Source/Color.swift @@ -1,20 +1,20 @@ public struct Color { - public let redComponent:Float - public let greenComponent:Float - public let blueComponent:Float - public let alphaComponent:Float + public let redComponent: Float + public let greenComponent: Float + public let blueComponent: Float + public let alphaComponent: Float - public init(red:Float, green:Float, blue:Float, alpha:Float = 1.0) { + public init(red: Float, green: Float, blue: Float, alpha: Float = 1.0) { self.redComponent = red self.greenComponent = green self.blueComponent = blue self.alphaComponent = alpha } - public static let black = Color(red:0.0, green:0.0, blue:0.0, alpha:1.0) - public static let white = Color(red:1.0, green:1.0, blue:1.0, alpha:1.0) - public static let red 
= Color(red:1.0, green:0.0, blue:0.0, alpha:1.0) - public static let green = Color(red:0.0, green:1.0, blue:0.0, alpha:1.0) - public static let blue = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0) - public static let transparent = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) + public static let black = Color(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) + public static let white = Color(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0) + public static let red = Color(red: 1.0, green: 0.0, blue: 0.0, alpha: 1.0) + public static let green = Color(red: 0.0, green: 1.0, blue: 0.0, alpha: 1.0) + public static let blue = Color(red: 0.0, green: 0.0, blue: 1.0, alpha: 1.0) + public static let transparent = Color(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) } diff --git a/framework/Source/FillMode.swift b/framework/Source/FillMode.swift index 650e9c96..ddd38d2e 100644 --- a/framework/Source/FillMode.swift +++ b/framework/Source/FillMode.swift @@ -13,24 +13,23 @@ import Glibc #endif #endif - public enum FillMode { case stretch case preserveAspectRatio case preserveAspectRatioAndFill - func transformVertices(_ vertices:[GLfloat], fromInputSize:GLSize, toFitSize:GLSize) -> [GLfloat] { - guard (vertices.count == 8) else { fatalError("Attempted to transform a non-quad to account for fill mode.") } + func transformVertices(_ vertices: [GLfloat], fromInputSize: GLSize, toFitSize: GLSize) -> [GLfloat] { + guard vertices.count == 8 else { fatalError("Attempted to transform a non-quad to account for fill mode.") } let aspectRatio = GLfloat(fromInputSize.height) / GLfloat(fromInputSize.width) let targetAspectRatio = GLfloat(toFitSize.height) / GLfloat(toFitSize.width) - let yRatio:GLfloat - let xRatio:GLfloat + let yRatio: GLfloat + let xRatio: GLfloat switch self { case .stretch: return vertices case .preserveAspectRatio: - if (aspectRatio > targetAspectRatio) { + if aspectRatio > targetAspectRatio { yRatio = 1.0 // xRatio = (GLfloat(toFitSize.height) / GLfloat(fromInputSize.height)) * 
(GLfloat(fromInputSize.width) / GLfloat(toFitSize.width)) xRatio = (GLfloat(fromInputSize.width) / GLfloat(toFitSize.width)) * (GLfloat(toFitSize.height) / GLfloat(fromInputSize.height)) @@ -39,7 +38,7 @@ public enum FillMode { yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) } case .preserveAspectRatioAndFill: - if (aspectRatio > targetAspectRatio) { + if aspectRatio > targetAspectRatio { xRatio = 1.0 yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) } else { @@ -51,21 +50,21 @@ public enum FillMode { // return [vertices[0] * xRatio, vertices[1] * yRatio, vertices[2] * xRatio, vertices[3] * yRatio, vertices[4] * xRatio, vertices[5] * yRatio, vertices[6] * xRatio, vertices[7] * yRatio] // TODO: Determine if this is misaligning things - let xConversionRatio:GLfloat = xRatio * GLfloat(toFitSize.width) / 2.0 - let xConversionDivisor:GLfloat = GLfloat(toFitSize.width) / 2.0 - let yConversionRatio:GLfloat = yRatio * GLfloat(toFitSize.height) / 2.0 - let yConversionDivisor:GLfloat = GLfloat(toFitSize.height) / 2.0 + let xConversionRatio: GLfloat = xRatio * GLfloat(toFitSize.width) / 2.0 + let xConversionDivisor = GLfloat(toFitSize.width) / 2.0 + let yConversionRatio: GLfloat = yRatio * GLfloat(toFitSize.height) / 2.0 + let yConversionDivisor = GLfloat(toFitSize.height) / 2.0 // The Double casting here is required by Linux - let value1:GLfloat = GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor - let value2:GLfloat = GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor - let value3:GLfloat = GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor - let value4:GLfloat = GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor - let value5:GLfloat = GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor - let 
value6:GLfloat = GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor - let value7:GLfloat = GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor - let value8:GLfloat = GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor + let value1 = GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor + let value2 = GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor + let value3 = GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor + let value4 = GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor + let value5 = GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor + let value6 = GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor + let value7 = GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor + let value8 = GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor return [value1, value2, value3, value4, value5, value6, value7, value8] diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 0db9319c..107c6680 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -18,13 +18,13 @@ import Foundation import AVFoundation // TODO: Add a good lookup table to this to allow for detailed error messages -struct FramebufferCreationError:Error { - let errorCode:GLenum +struct FramebufferCreationError: Error { + let errorCode: GLenum } public enum FramebufferTimingStyle { case stillImage - case videoFrame(timestamp:Timestamp) + case videoFrame(timestamp: Timestamp) func isTransient() -> Bool { switch self { @@ -33,7 +33,7 @@ public enum FramebufferTimingStyle { } } - public var timestamp:Timestamp? { + public var timestamp: Timestamp? 
{ get { switch self { case .stillImage: return nil @@ -44,25 +44,25 @@ public enum FramebufferTimingStyle { } public class Framebuffer: Hashable { - public var timingStyle:FramebufferTimingStyle = .stillImage - public var orientation:ImageOrientation - public var userInfo:[AnyHashable:Any]? + public var timingStyle: FramebufferTimingStyle = .stillImage + public var orientation: ImageOrientation + public var userInfo: [AnyHashable: Any]? - public let texture:GLuint - let framebuffer:GLuint? - let stencilBuffer:GLuint? - public let size:GLSize - let internalFormat:Int32 - let format:Int32 - let type:Int32 + public let texture: GLuint + let framebuffer: GLuint? + let stencilBuffer: GLuint? + public let size: GLSize + let internalFormat: Int32 + let format: Int32 + let type: Int32 - let hash:Int64 - let textureOverride:Bool + let hash: Int64 + let textureOverride: Bool let id = UUID().uuidString - unowned var context:OpenGLContext + unowned var context: OpenGLContext - public init(context:OpenGLContext, orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false, overriddenTexture:GLuint? = nil) throws { + public init(context: OpenGLContext, orientation: ImageOrientation, size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false, overriddenTexture: GLuint? 
= nil) throws { self.context = context self.size = size self.orientation = orientation @@ -70,19 +70,19 @@ public class Framebuffer: Hashable { self.format = format self.type = type - self.hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) + self.hash = hashForFramebufferWithProperties(orientation: orientation, size: size, textureOnly: textureOnly, minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT, internalFormat: internalFormat, format: format, type: type, stencil: stencil) if let newTexture = overriddenTexture { textureOverride = true texture = newTexture } else { textureOverride = false - texture = generateTexture(minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT) + texture = generateTexture(minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT) } - if (!textureOnly) { + if !textureOnly { do { - let (createdFrameBuffer, createdStencil) = try generateFramebufferForTexture(texture, width:size.width, height:size.height, internalFormat:internalFormat, format:format, type:type, stencil:stencil) + let (createdFrameBuffer, createdStencil) = try generateFramebufferForTexture(texture, width: size.width, height: size.height, internalFormat: internalFormat, format: format, type: type, stencil: stencil) framebuffer = createdFrameBuffer stencilBuffer = createdStencil } catch { @@ -97,12 +97,12 @@ public class Framebuffer: Hashable { } deinit { - if (!textureOverride) { + if !textureOverride { var mutableTexture = texture context.runOperationAsynchronously { glDeleteTextures(1, &mutableTexture) } - //debugPrint("Delete texture at size: \(size)") + // debugPrint("Delete texture at size: \(size)") } if let framebuffer = framebuffer { @@ -120,15 +120,15 @@ public class Framebuffer: Hashable { } } - public func sizeForTargetOrientation(_ 
targetOrientation:ImageOrientation) -> GLSize { + public func sizeForTargetOrientation(_ targetOrientation: ImageOrientation) -> GLSize { if self.orientation.rotationNeededForOrientation(targetOrientation).flipsDimensions() { - return GLSize(width:size.height, height:size.width) + return GLSize(width: size.height, height: size.width) } else { return size } } - public func aspectRatioForRotation(_ rotation:Rotation) -> Float { + public func aspectRatioForRotation(_ rotation: Rotation) -> Float { if rotation.flipsDimensions() { return Float(size.width) / Float(size.height) } else { @@ -136,27 +136,27 @@ public class Framebuffer: Hashable { } } - public func texelSize(for rotation:Rotation) -> Size { + public func texelSize(for rotation: Rotation) -> Size { if rotation.flipsDimensions() { - return Size(width:1.0 / Float(size.height), height:1.0 / Float(size.width)) + return Size(width: 1.0 / Float(size.height), height: 1.0 / Float(size.width)) } else { - return Size(width:1.0 / Float(size.width), height:1.0 / Float(size.height)) + return Size(width: 1.0 / Float(size.width), height: 1.0 / Float(size.height)) } } - func initialStageTexelSize(for rotation:Rotation) -> Size { + func initialStageTexelSize(for rotation: Rotation) -> Size { if rotation.flipsDimensions() { - return Size(width:1.0 / Float(size.height), height:0.0) + return Size(width: 1.0 / Float(size.height), height: 0.0) } else { - return Size(width:0.0, height:1.0 / Float(size.height)) + return Size(width: 0.0, height: 1.0 / Float(size.height)) } } - public func texturePropertiesForOutputRotation(_ rotation:Rotation) -> InputTextureProperties { - return InputTextureProperties(textureVBO:context.textureVBO(for:rotation), texture:texture) + public func texturePropertiesForOutputRotation(_ rotation: Rotation) -> InputTextureProperties { + return InputTextureProperties(textureVBO: context.textureVBO(for: rotation), texture: texture) } - public func texturePropertiesForTargetOrientation(_ 
targetOrientation:ImageOrientation) -> InputTextureProperties { + public func texturePropertiesForTargetOrientation(_ targetOrientation: ImageOrientation) -> InputTextureProperties { return texturePropertiesForOutputRotation(self.orientation.rotationNeededForOrientation(targetOrientation)) } @@ -169,7 +169,7 @@ public class Framebuffer: Hashable { // MARK: - // MARK: Framebuffer cache - public weak var cache:FramebufferCache? + public weak var cache: FramebufferCache? public var shouldReturnToCache = true var framebufferRetainCount = 0 public func lock() { @@ -182,8 +182,8 @@ public class Framebuffer: Hashable { public func unlock() { framebufferRetainCount -= 1 - if (framebufferRetainCount < 1) { - if ((framebufferRetainCount < 0) && (cache != nil)) { + if framebufferRetainCount < 1 { + if (framebufferRetainCount < 0) && (cache != nil) { print("WARNING: Tried to overrelease a framebuffer") } framebufferRetainCount = 0 @@ -200,11 +200,11 @@ public class Framebuffer: Hashable { } } -func hashForFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Int64 { - var result:Int64 = 1 - let prime:Int64 = 31 - let yesPrime:Int64 = 1231 - let noPrime:Int64 = 1237 +func hashForFramebufferWithProperties(orientation: ImageOrientation, size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false) -> Int64 { + var result: Int64 = 1 + let prime: Int64 = 31 + let yesPrime: Int64 = 1231 + let noPrime: Int64 = 1237 // TODO: Complete the rest of this result = prime * result + Int64(size.width) @@ 
-234,7 +234,7 @@ extension Rotation { } } - func croppedTextureCoordinates(offsetFromOrigin:Position, cropSize:Size) -> [GLfloat] { + func croppedTextureCoordinates(offsetFromOrigin: Position, cropSize: Size) -> [GLfloat] { let minX = GLfloat(offsetFromOrigin.x) let minY = GLfloat(offsetFromOrigin.y) let maxX = GLfloat(offsetFromOrigin.x) + GLfloat(cropSize.width) @@ -270,7 +270,7 @@ public extension Framebuffer { let bufferSize = Int(size.width * size.height * 4) guard let buffer = NSMutableData(capacity: bufferSize) else { return nil } glReadPixels(0, 0, size.width, size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), buffer.mutableBytes) - let dataProvider = CGDataProvider(dataInfo: nil, data: buffer.mutableBytes, size: bufferSize) {_,_,_ in } + let dataProvider = CGDataProvider(dataInfo: nil, data: buffer.mutableBytes, size: bufferSize) { _, _, _ in } guard let provider = dataProvider else { return nil } let cgImage = CGImage(width: Int(size.width), height: Int(size.height), diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index b3707d9d..77c79aed 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -15,31 +15,31 @@ // TODO: Add mechanism to purge framebuffers on low memory public class FramebufferCache { - var framebufferCache = [Int64:Set]() - let context:OpenGLContext + var framebufferCache = [Int64: Set]() + let context: OpenGLContext - init(context:OpenGLContext) { + init(context: OpenGLContext) { self.context = context } - public func requestFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { + public func requestFramebufferWithProperties(orientation: ImageOrientation, 
size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false) -> Framebuffer { __dispatch_assert_queue(context.serialDispatchQueue) - let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) - let framebuffer:Framebuffer + let hash = hashForFramebufferWithProperties(orientation: orientation, size: size, textureOnly: textureOnly, minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT, internalFormat: internalFormat, format: format, type: type, stencil: stencil) + let framebuffer: Framebuffer - if(framebufferCache.count > 10) { + if framebufferCache.count > 10 { print("Warning: Runaway framebuffer cache with size: \(framebufferCache.count)") } - if ((framebufferCache[hash]?.count ?? -1) > 0) { - //print("Restoring previous framebuffer") + if (framebufferCache[hash]?.count ?? 
-1) > 0 { + // print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeFirst() framebuffer.orientation = orientation } else { do { - //debugPrint("Generating new framebuffer at size: \(size)") + // debugPrint("Generating new framebuffer at size: \(size)") - framebuffer = try Framebuffer(context:context, orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) + framebuffer = try Framebuffer(context: context, orientation: orientation, size: size, textureOnly: textureOnly, minFilter: minFilter, magFilter: magFilter, wrapS: wrapS, wrapT: wrapT, internalFormat: internalFormat, format: format, type: type, stencil: stencil) framebuffer.cache = self } catch { fatalError("Could not create a framebuffer of the size (\(size.width), \(size.height)), error: \(error)") @@ -60,10 +60,10 @@ public class FramebufferCache { } } - func returnToCache(_ framebuffer:Framebuffer) { - //sprint("Returning to cache: \(framebuffer)") - context.runOperationSynchronously{ - if (self.framebufferCache[framebuffer.hash] != nil) { + func returnToCache(_ framebuffer: Framebuffer) { + // sprint("Returning to cache: \(framebuffer)") + context.runOperationSynchronously { + if self.framebufferCache[framebuffer.hash] != nil { if self.framebufferCache[framebuffer.hash]!.contains(framebuffer) { print("WARNING: add duplicated buffer to cache.") } else { @@ -75,4 +75,3 @@ public class FramebufferCache { } } } - diff --git a/framework/Source/ImageGenerator.swift b/framework/Source/ImageGenerator.swift index ea79f99b..3a0893c9 100644 --- a/framework/Source/ImageGenerator.swift +++ b/framework/Source/ImageGenerator.swift @@ -1,21 +1,21 @@ public class ImageGenerator: ImageSource { - public var size:Size + public var size: Size public let targets = TargetContainer() - var imageFramebuffer:Framebuffer! + var imageFramebuffer: Framebuffer! 
- public init(size:Size) { + public init(size: Size) { self.size = size do { - imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:GLSize(size)) + imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(size)) } catch { fatalError("Could not construct framebuffer of size: \(size), error:\(error)") } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { imageFramebuffer.lock() - target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) + target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex: atIndex) } func notifyTargets() { diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index 08e8c240..060e865c 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -4,7 +4,7 @@ public enum ImageOrientation { case landscapeLeft case landscapeRight - public func rotationNeededForOrientation(_ targetOrientation:ImageOrientation) -> Rotation { + public func rotationNeededForOrientation(_ targetOrientation: ImageOrientation) -> Rotation { switch (self, targetOrientation) { case (.portrait, .portrait), (.portraitUpsideDown, .portraitUpsideDown), (.landscapeLeft, .landscapeLeft), (.landscapeRight, .landscapeRight): return .noRotation case (.portrait, .portraitUpsideDown): return .rotate180 diff --git a/framework/Source/Linux/GLUTRenderWindow.swift b/framework/Source/Linux/GLUTRenderWindow.swift index fefea5a4..7062760c 100755 --- a/framework/Source/Linux/GLUTRenderWindow.swift +++ b/framework/Source/Linux/GLUTRenderWindow.swift @@ -5,20 +5,19 @@ import Foundation public class GLUTRenderWindow: ImageConsumer { public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - private lazy var displayShader:ShaderProgram = { + public let maximumInputs: UInt 
= 1 + private lazy var displayShader: ShaderProgram = { sharedImageProcessingContext.makeCurrentContext() // self.openGLContext = sharedImageProcessingContext.context - return crashOnShaderCompileFailure("GLUTRenderWindow"){try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + return crashOnShaderCompileFailure("GLUTRenderWindow") { try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - - public init(width:UInt32, height:UInt32, title:String) { + public init(width: UInt32, height: UInt32, title: String) { var localArgc = Process.argc glutInit(&localArgc, Process.unsafeArgv) glutInitDisplayMode(UInt32(GLUT_DOUBLE)) glutInitWindowSize(Int32(width), Int32(height)) - glutInitWindowPosition(100,100) + glutInitWindowPosition(100, 100) glutCreateWindow(title) glViewport(0, 0, GLsizei(width), GLsizei(height)) @@ -28,7 +27,7 @@ public class GLUTRenderWindow: ImageConsumer { // glutReshapeFunc(void (*func)(int width, int height) // Maybe use this to get window reshape events } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(framebuffer: Framebuffer, fromSourceIndex: UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) @@ -39,21 +38,20 @@ public class GLUTRenderWindow: ImageConsumer { glClearColor(0.0, 0.0, 0.0, 0.0) glClear(GLenum(GL_COLOR_BUFFER_BIT)) - renderQuadWithShader(self.displayShader, vertices:verticallyInvertedImageVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.Portrait)]) + renderQuadWithShader(self.displayShader, vertices: verticallyInvertedImageVertices, inputTextures: [framebuffer.texturePropertiesForTargetOrientation(.Portrait)]) framebuffer.unlock() glutSwapBuffers() } - public func loopWithFunction(idleFunction:() -> ()) { + public func 
loopWithFunction(idleFunction:() -> Void) { loopFunction = idleFunction glutIdleFunc(glutCallbackFunction) glutMainLoop() } } -var loopFunction:(() -> ())! = nil +var loopFunction:(() -> Void)! = nil func glutCallbackFunction() { loopFunction() } - diff --git a/framework/Source/Linux/OpenGLContext-RPi.swift b/framework/Source/Linux/OpenGLContext-RPi.swift index 5d0f06d3..89572389 100755 --- a/framework/Source/Linux/OpenGLContext-RPi.swift +++ b/framework/Source/Linux/OpenGLContext-RPi.swift @@ -2,13 +2,13 @@ import COpenGLES.gles2 import CVideoCore public class OpenGLContext: SerialDispatch { - lazy var framebufferCache:FramebufferCache = { - return FramebufferCache(context:self) + lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] + var shaderCache: [String: ShaderProgram] = [:] - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() // MARK: - @@ -33,25 +33,25 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var 
maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() - return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer.self)) + return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer.self)) } }() -} \ No newline at end of file +} diff --git a/framework/Source/Linux/OpenGLContext.swift b/framework/Source/Linux/OpenGLContext.swift index e56337d9..546841ab 100755 --- a/framework/Source/Linux/OpenGLContext.swift +++ b/framework/Source/Linux/OpenGLContext.swift @@ -1,20 +1,19 @@ import COpenGL public class OpenGLContext: SerialDispatch { - lazy var framebufferCache:FramebufferCache = { - return FramebufferCache(context:self) + lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] + var shaderCache: [String: ShaderProgram] = [:] - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try 
self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() // MARK: - // MARK: Initialization and teardown init() { - glDisable(GLenum(GL_DEPTH_TEST)) glEnable(GLenum(GL_TEXTURE_2D)) } @@ -31,25 +30,25 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() return String.fromCString(UnsafePointer(glGetString(GLenum(GL_EXTENSIONS))))! 
} }() -} \ No newline at end of file +} diff --git a/framework/Source/Linux/RPiRenderWindow.swift b/framework/Source/Linux/RPiRenderWindow.swift index 603c97d0..c9079785 100755 --- a/framework/Source/Linux/RPiRenderWindow.swift +++ b/framework/Source/Linux/RPiRenderWindow.swift @@ -3,31 +3,31 @@ import CVideoCore import Foundation -var nativewindow = EGL_DISPMANX_WINDOW_T(element:0, width:0, height:0) // This needs to be retained at the top level or its deallocation will destroy the window system +var nativewindow = EGL_DISPMANX_WINDOW_T(element: 0, width: 0, height: 0) // This needs to be retained at the top level or its deallocation will destroy the window system public class RPiRenderWindow: ImageConsumer { public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - private lazy var displayShader:ShaderProgram = { + public let maximumInputs: UInt = 1 + private lazy var displayShader: ShaderProgram = { sharedImageProcessingContext.makeCurrentContext() - return crashOnShaderCompileFailure("RPiRenderWindow"){try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + return crashOnShaderCompileFailure("RPiRenderWindow") { try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - let display:EGLDisplay - let surface:EGLSurface - let context:EGLContext + let display: EGLDisplay + let surface: EGLSurface + let context: EGLContext - let windowWidth:UInt32 - let windowHeight:UInt32 + let windowWidth: UInt32 + let windowHeight: UInt32 - public init(width:UInt32? = nil, height:UInt32? = nil) { + public init(width: UInt32? = nil, height: UInt32? 
= nil) { sharedImageProcessingContext.makeCurrentContext() display = eglGetDisplay(nil /* EGL_DEFAULT_DISPLAY */) // guard (display != EGL_NO_DISPLAY) else {throw renderingError(errorString:"Could not obtain display")} // guard (eglInitialize(display, nil, nil) != EGL_FALSE) else {throw renderingError(errorString:"Could not initialize display")} eglInitialize(display, nil, nil) - let attributes:[EGLint] = [ + let attributes: [EGLint] = [ EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, @@ -36,38 +36,38 @@ public class RPiRenderWindow: ImageConsumer { EGL_NONE ] - var config:EGLConfig? = nil - var num_config:EGLint = 0 + var config: EGLConfig? + var num_config: EGLint = 0 // guard (eglChooseConfig(display, attributes, &config, 1, &num_config) != EGL_FALSE) else {throw renderingError(errorString:"Could not get a framebuffer configuration")} eglChooseConfig(display, attributes, &config, 1, &num_config) eglBindAPI(EGLenum(EGL_OPENGL_ES_API)) - //context = eglCreateContext(display, config, EGL_NO_CONTEXT, context_attributes) - let context_attributes:[EGLint] = [ + // context = eglCreateContext(display, config, EGL_NO_CONTEXT, context_attributes) + let context_attributes: [EGLint] = [ EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE ] context = eglCreateContext(display, config, nil /* EGL_NO_CONTEXT*/, context_attributes) - //guard (context != EGL_NO_CONTEXT) else {throw renderingError(errorString:"Could not create a rendering context")} + // guard (context != EGL_NO_CONTEXT) else {throw renderingError(errorString:"Could not create a rendering context")} - var screen_width:UInt32 = 0 - var screen_height:UInt32 = 0 + var screen_width: UInt32 = 0 + var screen_height: UInt32 = 0 graphics_get_display_size(0 /* LCD */, &screen_width, &screen_height) self.windowWidth = width ?? screen_width self.windowHeight = height ?? 
screen_height let dispman_display = vc_dispmanx_display_open( 0 /* LCD */) let dispman_update = vc_dispmanx_update_start( 0 ) - var dst_rect = VC_RECT_T(x:0, y:0, width:Int32(windowWidth), height:Int32(windowHeight)) - var src_rect = VC_RECT_T(x:0, y:0, width:Int32(windowWidth) << 16, height:Int32(windowHeight) << 16) + var dst_rect = VC_RECT_T(x: 0, y: 0, width: Int32(windowWidth), height: Int32(windowHeight)) + var src_rect = VC_RECT_T(x: 0, y: 0, width: Int32(windowWidth) << 16, height: Int32(windowHeight) << 16) let dispman_element = vc_dispmanx_element_add(dispman_update, dispman_display, 0/*layer*/, &dst_rect, 0/*src*/, &src_rect, DISPMANX_PROTECTION_T(DISPMANX_PROTECTION_NONE), nil /*alpha*/, nil/*clamp*/, DISPMANX_TRANSFORM_T(0)/*transform*/) vc_dispmanx_update_submit_sync(dispman_update) - nativewindow = EGL_DISPMANX_WINDOW_T(element:dispman_element, width:Int32(windowWidth), height:Int32(windowHeight)) + nativewindow = EGL_DISPMANX_WINDOW_T(element: dispman_element, width: Int32(windowWidth), height: Int32(windowHeight)) surface = eglCreateWindowSurface(display, config, &nativewindow, nil) - //guard (surface != EGL_NO_SURFACE) else {throw renderingError(errorString:"Could not create a rendering surface")} + // guard (surface != EGL_NO_SURFACE) else {throw renderingError(errorString:"Could not create a rendering surface")} eglMakeCurrent(display, surface, surface, context) @@ -76,7 +76,7 @@ public class RPiRenderWindow: ImageConsumer { glClear(GLenum(GL_COLOR_BUFFER_BIT)) } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) @@ -85,8 +85,8 @@ public class RPiRenderWindow: ImageConsumer { glClearColor(0.0, 0.0, 0.0, 0.0) glClear(GLenum(GL_COLOR_BUFFER_BIT)) - renderQuadWithShader(self.displayShader, vertices:verticallyInvertedImageVertices, 
inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.portrait)]) + renderQuadWithShader(self.displayShader, vertices: verticallyInvertedImageVertices, inputTextures: [framebuffer.texturePropertiesForTargetOrientation(.portrait)]) framebuffer.unlock() eglSwapBuffers(display, surface) } -} \ No newline at end of file +} diff --git a/framework/Source/Linux/V4LCamera.swift b/framework/Source/Linux/V4LCamera.swift index 0f26146f..9d345e9b 100755 --- a/framework/Source/Linux/V4LCamera.swift +++ b/framework/Source/Linux/V4LCamera.swift @@ -122,4 +122,4 @@ public class V4LCamera:ImageSource { // Not needed for camera inputs } } -*/ \ No newline at end of file +*/ diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 41a0dd07..39517e03 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -4,10 +4,10 @@ import AVFoundation let initialBenchmarkFramesToIgnore = 5 public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - public var orientation:ImageOrientation - public var runBenchmark:Bool = false - public var logFPS:Bool = false - public var audioEncodingTarget:AudioEncodingTarget? { + public var orientation: ImageOrientation + public var runBenchmark: Bool = false + public var logFPS: Bool = false + public var audioEncodingTarget: AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { self.removeAudioInputsAndOutputs() @@ -23,28 +23,28 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public let targets = TargetContainer() - let captureSession:AVCaptureSession - let inputCamera:AVCaptureDevice - let videoInput:AVCaptureDeviceInput! - let videoOutput:AVCaptureVideoDataOutput! - var microphone:AVCaptureDevice? - var audioInput:AVCaptureDeviceInput? - var audioOutput:AVCaptureAudioDataOutput? 
- - var supportsFullYUVRange:Bool = false - let captureAsYUV:Bool - let yuvConversionShader:ShaderProgram? - let frameRenderingSemaphore = DispatchSemaphore(value:1) + let captureSession: AVCaptureSession + let inputCamera: AVCaptureDevice + let videoInput: AVCaptureDeviceInput! + let videoOutput: AVCaptureVideoDataOutput! + var microphone: AVCaptureDevice? + var audioInput: AVCaptureDeviceInput? + var audioOutput: AVCaptureAudioDataOutput? + + var supportsFullYUVRange: Bool = false + let captureAsYUV: Bool + let yuvConversionShader: ShaderProgram? + let frameRenderingSemaphore = DispatchSemaphore(value: 1) let cameraProcessingQueue = standardProcessingQueue let audioProcessingQueue = lowProcessingQueue var numberOfFramesCaptured = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: Double = 0.0 var framesSinceLastCheck = 0 var lastCheckTime = CFAbsoluteTimeGetCurrent() - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, orientation:ImageOrientation = .portrait, captureAsYUV:Bool = true) throws { - self.inputCamera = cameraDevice ?? AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeVideo) + public init(sessionPreset: String, cameraDevice: AVCaptureDevice? = nil, orientation: ImageOrientation = .portrait, captureAsYUV: Bool = true) throws { + self.inputCamera = cameraDevice ?? 
AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) self.orientation = orientation self.captureAsYUV = captureAsYUV @@ -52,7 +52,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.captureSession.beginConfiguration() do { - self.videoInput = try AVCaptureDeviceInput(device:inputCamera) + self.videoInput = try AVCaptureDeviceInput(device: inputCamera) } catch { self.videoInput = nil self.videoOutput = nil @@ -60,7 +60,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() throw error } - if (captureSession.canAddInput(videoInput)) { + if captureSession.canAddInput(videoInput) { captureSession.addInput(videoInput) } @@ -72,24 +72,24 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer supportsFullYUVRange = false let supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes for currentPixelFormat in supportedPixelFormats! { - if ((currentPixelFormat as! NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { + if (currentPixelFormat as! 
NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { supportsFullYUVRange = true } } - if (supportsFullYUVRange) { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + if supportsFullYUVRange { + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] } else { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionVideoRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: Int32(kCVPixelFormatType_32BGRA))] } - if (captureSession.canAddOutput(videoOutput)) { + if 
captureSession.canAddOutput(videoOutput) { captureSession.addOutput(videoOutput) } captureSession.sessionPreset = sessionPreset @@ -97,24 +97,24 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() - videoOutput.setSampleBufferDelegate(self, queue:cameraProcessingQueue) + videoOutput.setSampleBufferDelegate(self, queue: cameraProcessingQueue) } deinit { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.stopCapture() - self.videoOutput.setSampleBufferDelegate(nil, queue:nil) - self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) + self.videoOutput.setSampleBufferDelegate(nil, queue: nil) + self.audioOutput?.setSampleBufferDelegate(nil, queue: nil) } } public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - guard (captureOutput != audioOutput) else { + guard captureOutput != audioOutput else { self.processAudioSampleBuffer(sampleBuffer) return } - guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } + guard frameRenderingSemaphore.wait(timeout: DispatchTime.now()) == DispatchTimeoutResult.success else { return } let startTime = CFAbsoluteTimeGetCurrent() let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer)! 
@@ -123,33 +123,33 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - sharedImageProcessingContext.runOperationAsynchronously{ - let cameraFramebuffer:Framebuffer + sharedImageProcessingContext.runOperationAsynchronously { + let cameraFramebuffer: Framebuffer - if (self.captureAsYUV) { - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.orientation, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + if self.captureAsYUV { + let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.orientation, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) luminanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.orientation, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.orientation, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) chrominanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 
0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 1)) - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) - let conversionMatrix:Matrix3x3 - if (self.supportsFullYUVRange) { + let conversionMatrix: Matrix3x3 + if self.supportsFullYUVRange { conversionMatrix = colorConversionMatrix601FullRangeDefault } else { conversionMatrix = colorConversionMatrix601Default } - convertYUVToRGB(shader:self.yuvConversionShader!, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:cameraFramebuffer, colorConversionMatrix:conversionMatrix) + convertYUVToRGB(shader: self.yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: cameraFramebuffer, colorConversionMatrix: conversionMatrix) } else { - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.orientation, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.orientation, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame)) @@ -161,7 +161,7 @@ public class 
Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if self.runBenchmark { self.numberOfFramesCaptured += 1 - if (self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore) { + if self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore { let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured - initialBenchmarkFramesToIgnore)) ms") @@ -170,7 +170,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } if self.logFPS { - if ((CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0) { + if (CFAbsoluteTimeGetCurrent() - self.lastCheckTime) > 1.0 { self.lastCheckTime = CFAbsoluteTimeGetCurrent() print("FPS: \(self.framesSinceLastCheck)") self.framesSinceLastCheck = 0 @@ -184,23 +184,23 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public func startCapture() { - sharedImageProcessingContext.runOperationAsynchronously{ + sharedImageProcessingContext.runOperationAsynchronously { self.numberOfFramesCaptured = 0 self.totalFrameTimeDuringCapture = 0 } - if (!captureSession.isRunning) { + if !captureSession.isRunning { captureSession.startRunning() } } public func stopCapture() { - if (captureSession.isRunning) { + if captureSession.isRunning { captureSession.stopRunning() } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for camera inputs } @@ -208,14 +208,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer // MARK: Audio processing func addAudioInputsAndOutputs() throws { - guard (audioOutput == nil) else { return } + guard audioOutput == nil else { return } captureSession.beginConfiguration() defer { captureSession.commitConfiguration() } microphone = 
AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) - audioInput = try AVCaptureDeviceInput(device:microphone) + audioInput = try AVCaptureDeviceInput(device: microphone) if captureSession.canAddInput(audioInput) { captureSession.addInput(audioInput) } @@ -223,11 +223,11 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureSession.canAddOutput(audioOutput) { captureSession.addOutput(audioOutput) } - audioOutput?.setSampleBufferDelegate(self, queue:audioProcessingQueue) + audioOutput?.setSampleBufferDelegate(self, queue: audioProcessingQueue) } func removeAudioInputsAndOutputs() { - guard (audioOutput != nil) else { return } + guard audioOutput != nil else { return } captureSession.beginConfiguration() captureSession.removeInput(audioInput!) @@ -238,7 +238,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.commitConfiguration() } - func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { + func processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) { self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) } } diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index 41528f51..b8cf8043 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -4,39 +4,39 @@ public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false - let yuvConversionShader:ShaderProgram - let asset:AVAsset - let assetReader:AVAssetReader - let playAtActualSpeed:Bool - let loop:Bool + let yuvConversionShader: ShaderProgram + let asset: AVAsset + let assetReader: AVAssetReader + let playAtActualSpeed: Bool + let loop: Bool var videoEncodingIsFinished = false var previousFrameTime = kCMTimeZero var previousActualFrameTime = CFAbsoluteTimeGetCurrent() var numberOfFramesCaptured = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: 
Double = 0.0 // TODO: Add movie reader synchronization // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public init(asset: AVAsset, playAtActualSpeed: Bool = false, loop: Bool = false) throws { self.asset = asset self.playAtActualSpeed = playAtActualSpeed self.loop = loop - self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} + self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } - assetReader = try AVAssetReader(asset:self.asset) + assetReader = try AVAssetReader(asset: self.asset) - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) + let outputSettings: [String: AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings: outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerVideoTrackOutput) // TODO: Audio here } - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] - let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, 
playAtActualSpeed:playAtActualSpeed, loop:loop) + public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let inputAsset = AVURLAsset(url: url, options: inputOptions) + try self.init(asset: inputAsset, playAtActualSpeed: playAtActualSpeed, loop: loop) } // MARK: - @@ -45,29 +45,29 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { DispatchQueue.global().async { - guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } + guard self.asset.statusOfValue(forKey: "tracks", error: nil) == .loaded else { return } guard self.assetReader.startReading() else { debugPrint("Couldn't start reading") return } - var readerVideoTrackOutput:AVAssetReaderOutput? = nil; + var readerVideoTrackOutput: AVAssetReaderOutput? for output in self.assetReader.outputs { - if(output.mediaType == AVMediaTypeVideo) { - readerVideoTrackOutput = output; + if output.mediaType == AVMediaTypeVideo { + readerVideoTrackOutput = output } } - while (self.assetReader.status == .reading) { - self.readNextVideoFrame(from:readerVideoTrackOutput!) + while self.assetReader.status == .reading { + self.readNextVideoFrame(from: readerVideoTrackOutput!) 
} - if (self.assetReader.status == .completed) { + if self.assetReader.status == .completed { self.assetReader.cancelReading() - if (self.loop) { + if self.loop { // TODO: Restart movie processing } else { self.endProcessing() @@ -83,16 +83,15 @@ public class MovieInput: ImageSource { } func endProcessing() { - } // MARK: - // MARK: Internal processing functions - func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { - if ((assetReader.status == .reading) && !videoEncodingIsFinished) { + func readNextVideoFrame(from videoTrackOutput: AVAssetReaderOutput) { + if (assetReader.status == .reading) && !videoEncodingIsFinished { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { - if (playAtActualSpeed) { + if playAtActualSpeed { // Do this outside of the video processing queue to not slow that down while waiting let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime) @@ -101,7 +100,7 @@ public class MovieInput: ImageSource { let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) let actualTimeDifference = currentActualTime - previousActualFrameTime - if (frameTimeDifference > actualTimeDifference) { + if frameTimeDifference > actualTimeDifference { usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference)))) } @@ -109,14 +108,14 @@ public class MovieInput: ImageSource { previousActualFrameTime = CFAbsoluteTimeGetCurrent() } - sharedImageProcessingContext.runOperationSynchronously{ - self.process(movieFrame:sampleBuffer) + sharedImageProcessingContext.runOperationSynchronously { + self.process(movieFrame: sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) } } else { - if (!loop) { + if !loop { videoEncodingIsFinished = true - if (videoEncodingIsFinished) { + if videoEncodingIsFinished { self.endProcessing() } } @@ -130,15 +129,15 @@ public class MovieInput: ImageSource { } - func process(movieFrame 
frame:CMSampleBuffer) { + func process(movieFrame frame: CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! // processingFrameTime = currentSampleTime - self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) + self.process(movieFrame: movieFrame, withSampleTime: currentSampleTime) } - func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) @@ -157,21 +156,21 @@ public class MovieInput: ImageSource { let startTime = CFAbsoluteTimeGetCurrent() - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) luminanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), 
height: GLint(bufferHeight)), textureOnly: true) chrominanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1)) - let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) - convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) + convertYUVToRGB(shader: self.yuvConversionShader, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) @@ -186,7 +185,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for movie inputs } } diff --git a/framework/Source/Mac/MovieOutput.swift b/framework/Source/Mac/MovieOutput.swift index a9b9705d..e6a16c5c 100644 --- a/framework/Source/Mac/MovieOutput.swift +++ b/framework/Source/Mac/MovieOutput.swift @@ -2,78 +2,78 @@ import AVFoundation public protocol AudioEncodingTarget { func 
activateAudioTrack() - func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) + func processAudioBuffer(_ sampleBuffer: CMSampleBuffer) } public class MovieOutput: ImageConsumer, AudioEncodingTarget { public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 - let assetWriter:AVAssetWriter - let assetWriterVideoInput:AVAssetWriterInput - var assetWriterAudioInput:AVAssetWriterInput? - let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor - let size:Size + let assetWriter: AVAssetWriter + let assetWriterVideoInput: AVAssetWriterInput + var assetWriterAudioInput: AVAssetWriterInput? + let assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor + let size: Size private var isRecording = false private var videoEncodingIsFinished = false private var audioEncodingIsFinished = false - private var startTime:CMTime? + private var startTime: CMTime? private var previousFrameTime = kCMTimeNegativeInfinity private var previousAudioTime = kCMTimeNegativeInfinity - private var encodingLiveVideo:Bool + private var encodingLiveVideo: Bool - public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { + public init(URL: Foundation.URL, size: Size, fileType: String = AVFileTypeQuickTimeMovie, liveVideo: Bool = false, settings: [String: AnyObject]? = nil) throws { self.size = size - assetWriter = try AVAssetWriter(url:URL, fileType:fileType) + assetWriter = try AVAssetWriter(url: URL, fileType: fileType) // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. 
assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) - var localSettings:[String:AnyObject] + var localSettings: [String: AnyObject] if let settings = settings { localSettings = settings } else { - localSettings = [String:AnyObject]() + localSettings = [String: AnyObject]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value: size.width) + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value: size.height) + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString - assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) + assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
- let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(value:size.width), - kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] + let sourcePixelBufferAttributesDictionary: [String: AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: Int32(kCVPixelFormatType_32BGRA)), + kCVPixelBufferWidthKey as String: NSNumber(value: size.width), + kCVPixelBufferHeightKey as String: NSNumber(value: size.height)] - assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) + assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) } public func startRecording() { startTime = nil - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.isRecording = self.assetWriter.startWriting() } } public func finishRecording(_ completionCallback:(() -> Void)? 
= nil) { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { self.isRecording = false - if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { - sharedImageProcessingContext.runOperationAsynchronously{ + if self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown { + sharedImageProcessingContext.runOperationAsynchronously { completionCallback?() } return } - if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { + if (self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished) { self.videoEncodingIsFinished = true self.assetWriterVideoInput.markAsFinished() } - if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { + if (self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished) { self.audioEncodingIsFinished = true self.assetWriterAudioInput?.markAsFinished() } @@ -82,13 +82,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if let callback = completionCallback { self.assetWriter.finishWriting(completionHandler: callback) } else { - self.assetWriter.finishWriting{} + self.assetWriter.finishWriting {} } } } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { defer { framebuffer.unlock() } @@ -97,10 +97,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // Ignore still images and other non-video updates (do I still need this?) 
guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != previousFrameTime) else { return } + guard frameTime != previousFrameTime else { return } - if (startTime == nil) { - if (assetWriter.status != .writing) { + if startTime == nil { + if assetWriter.status != .writing { assetWriter.startWriting() } @@ -109,35 +109,33 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { + guard assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo) else { debugPrint("Had to drop a frame at time \(frameTime)") return } - var pixelBufferFromPool:CVPixelBuffer? = nil + var pixelBufferFromPool: CVPixelBuffer? let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBufferFromPool) guard let pixelBuffer = pixelBufferFromPool, (pixelBufferStatus == kCVReturnSuccess) else { return } - - - renderIntoPixelBuffer(pixelBuffer, framebuffer:framebuffer) + renderIntoPixelBuffer(pixelBuffer, framebuffer: framebuffer) - if (!assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime:frameTime)) { + if !assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime: frameTime) { print("Problem appending pixel buffer at time: \(frameTime)") } CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) } - func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) + func renderIntoPixelBuffer(_ pixelBuffer: CVPixelBuffer, framebuffer: Framebuffer) 
{ + let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: GLSize(self.size)) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) @@ -149,18 +147,18 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func activateAudioTrack() { // TODO: Add ability to set custom output settings - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) + assetWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil) assetWriter.add(assetWriterAudioInput!) 
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer) { guard let assetWriterAudioInput = assetWriterAudioInput else { return } - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if (self.startTime == nil) { - if (self.assetWriter.status != .writing) { + if self.startTime == nil { + if self.assetWriter.status != .writing { self.assetWriter.startWriting() } @@ -168,27 +166,26 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.startTime = currentSampleTime } - guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo) else { return } - if (!assetWriterAudioInput.append(sampleBuffer)) { + if !assetWriterAudioInput.append(sampleBuffer) { print("Trouble appending audio sample buffer") } } } } - public extension Timestamp { - public init(_ time:CMTime) { + public init(_ time: CMTime) { self.value = time.value self.timescale = time.timescale - self.flags = TimestampFlags(rawValue:time.flags.rawValue) + self.flags = TimestampFlags(rawValue: time.flags.rawValue) self.epoch = time.epoch } - public var asCMTime:CMTime { + public var asCMTime: CMTime { get { return CMTimeMakeWithEpoch(value, timescale, epoch) } diff --git a/framework/Source/Mac/OpenGLContext.swift b/framework/Source/Mac/OpenGLContext.swift index fab790dc..9a45bb8e 100755 --- a/framework/Source/Mac/OpenGLContext.swift +++ b/framework/Source/Mac/OpenGLContext.swift @@ -4,47 +4,47 @@ import Cocoa // TODO: Figure out way to allow for multiple contexts for different GPUs public class OpenGLContext: SerialDispatch { - public lazy var framebufferCache:FramebufferCache = { - return 
FramebufferCache(context:self) + public lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] - public let standardImageVBO:GLuint - var textureVBOs:[Rotation:GLuint] = [:] + var shaderCache: [String: ShaderProgram] = [:] + public let standardImageVBO: GLuint + var textureVBOs: [Rotation: GLuint] = [:] - let context:NSOpenGLContext + let context: NSOpenGLContext - lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let serialDispatchQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) public let dispatchQueueKey = DispatchSpecificKey() // MARK: - // MARK: Initialization and teardown init() { - serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) + serialDispatchQueue.setSpecific(key: dispatchQueueKey, value: 81) - let pixelFormatAttributes:[NSOpenGLPixelFormatAttribute] = [ + let pixelFormatAttributes: [NSOpenGLPixelFormatAttribute] = [ NSOpenGLPixelFormatAttribute(NSOpenGLPFADoubleBuffer), NSOpenGLPixelFormatAttribute(NSOpenGLPFAAccelerated), 0, 0 ] - guard let pixelFormat = NSOpenGLPixelFormat(attributes:pixelFormatAttributes) else { + guard let pixelFormat = NSOpenGLPixelFormat(attributes: pixelFormatAttributes) else { fatalError("No appropriate pixel format found when creating OpenGL context.") } // TODO: Take into account the sharegroup - guard let generatedContext = NSOpenGLContext(format:pixelFormat, share:nil) else { + 
guard let generatedContext = NSOpenGLContext(format: pixelFormat, share: nil) else { fatalError("Unable to create an OpenGL context. The GPUImage framework requires OpenGL support to work.") } self.context = generatedContext generatedContext.makeCurrentContext() - standardImageVBO = generateVBO(for:standardImageVertices) + standardImageVBO = generateVBO(for: standardImageVertices) generateTextureVBOs() glDisable(GLenum(GL_DEPTH_TEST)) @@ -65,25 +65,25 @@ public class OpenGLContext: SerialDispatch { // MARK: - // MARK: Device capabilities - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() - return 
String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer.self)) + return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer.self)) } }() } diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index fae3511d..91ce44c5 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -3,30 +3,30 @@ import Cocoa public class PictureInput: ImageSource { public let targets = TargetContainer() - var imageFramebuffer:Framebuffer! - var hasProcessedImage:Bool = false + var imageFramebuffer: Framebuffer! + var hasProcessedImage: Bool = false - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { + public init(image: CGImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { // TODO: Dispatch this whole thing asynchronously to move image loading off main thread let widthOfImage = GLint(image.width) let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. 
- guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } + guard (widthOfImage > 0) && (heightOfImage > 0) else { fatalError("Tried to pass in a zero-sized image") } var widthToUseForTexture = widthOfImage var heightToUseForTexture = heightOfImage var shouldRedrawUsingCoreGraphics = false // For now, deal with images larger than the maximum texture size by resizing to be within that limit - let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) - if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { + let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width: Float(widthOfImage), height: Float(heightOfImage)))) + if (scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage) { widthToUseForTexture = scaledImageSizeToFitOnGPU.width heightToUseForTexture = scaledImageSizeToFitOnGPU.height shouldRedrawUsingCoreGraphics = true } - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) @@ -36,33 +36,32 @@ public class PictureInput: ImageSource { shouldRedrawUsingCoreGraphics = true } - var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFData! + var imageData: UnsafeMutablePointer! + var dataFromImageDataProvider: CFData! var format = GL_BGRA - if (!shouldRedrawUsingCoreGraphics) { + if !shouldRedrawUsingCoreGraphics { /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to * tell GL about the memory layout with GLES. 
*/ - if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) - { + if (image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8) { shouldRedrawUsingCoreGraphics = true } else { /* Check that the bitmap pixel format is compatible with GL */ let bitmapInfo = image.bitmapInfo - if (bitmapInfo.contains(.floatComponents)) { + if bitmapInfo.contains(.floatComponents) { /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) - if (bitmapInfo.contains(.byteOrder32Little)) { + let alphaInfo = CGImageAlphaInfo(rawValue: bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if bitmapInfo.contains(.byteOrder32Little) { /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { + if (alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst) { shouldRedrawUsingCoreGraphics = true } - } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { + } else if (bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big)) { /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { + if (alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast) { shouldRedrawUsingCoreGraphics = true } else { /* Can access directly using GL_RGBA pixel format */ @@ -75,24 +74,24 @@ public class PictureInput: ImageSource { // 
CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); - if (shouldRedrawUsingCoreGraphics) { + if shouldRedrawUsingCoreGraphics { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer.allocate(capacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - let imageContext = CGContext(data:imageData, width:Int(widthToUseForTexture), height:Int(heightToUseForTexture), bitsPerComponent:8, bytesPerRow:Int(widthToUseForTexture) * 4, space:genericRGBColorspace, bitmapInfo:CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) + let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) + imageContext?.draw(image, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly dataFromImageDataProvider = image.dataProvider?.data - imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)!) + imageData = UnsafeMutablePointer(mutating: CFDataGetBytePtr(dataFromImageDataProvider)!) 
} sharedImageProcessingContext.makeCurrentContext() do { - imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) + imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: orientation, size: GLSize(width: widthToUseForTexture, height: heightToUseForTexture), textureOnly: true) imageFramebuffer.timingStyle = .stillImage } catch { fatalError("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") @@ -100,40 +99,40 @@ public class PictureInput: ImageSource { glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), imageFramebuffer.texture) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR) } glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, widthToUseForTexture, heightToUseForTexture, 0, GLenum(format), GLenum(GL_UNSIGNED_BYTE), imageData) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) - if (shouldRedrawUsingCoreGraphics) { - imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + if shouldRedrawUsingCoreGraphics { + imageData.deallocate(capacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) } } - public convenience init(image:NSImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image: NSImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { + self.init(image: image.cgImage(forProposedRect: nil, context: nil, hints: nil)!, smoothlyScaleOutput: smoothlyScaleOutput, 
orientation: orientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { - guard let image = NSImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } - self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(imageName: String, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { + guard let image = NSImage(named: imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } + self.init(image: image.cgImage(forProposedRect: nil, context: nil, hints: nil)!, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation) } - public func processImage(synchronously:Bool = false) { + public func processImage(synchronously: Bool = false) { if synchronously { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { sharedImageProcessingContext.makeCurrentContext() self.updateTargetsWithFramebuffer(self.imageFramebuffer) self.hasProcessedImage = true } } else { - sharedImageProcessingContext.runOperationAsynchronously{ + sharedImageProcessingContext.runOperationAsynchronously { sharedImageProcessingContext.makeCurrentContext() self.updateTargetsWithFramebuffer(self.imageFramebuffer) self.hasProcessedImage = true @@ -141,10 +140,10 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { if hasProcessedImage { imageFramebuffer.lock() - target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) + target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex: atIndex) } } } diff --git a/framework/Source/Mac/PictureOutput.swift 
b/framework/Source/Mac/PictureOutput.swift index 59232ce4..343e91c9 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -7,14 +7,14 @@ public enum PictureFileFormat { } public class PictureOutput: ImageConsumer { - public var encodedImageAvailableCallback:((Data) -> ())? - public var encodedImageFormat:PictureFileFormat = .png - public var imageAvailableCallback:((NSImage) -> ())? - public var onlyCaptureNextFrame:Bool = true + public var encodedImageAvailableCallback: ((Data) -> Void)? + public var encodedImageFormat: PictureFileFormat = .png + public var imageAvailableCallback: ((NSImage) -> Void)? + public var onlyCaptureNextFrame: Bool = true public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - var url:URL! + public let maximumInputs: UInt = 1 + var url: URL! public init() { } @@ -22,14 +22,14 @@ public class PictureOutput: ImageConsumer { deinit { } - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { onlyCaptureNextFrame = true encodedImageFormat = format self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { // FIXME: Xcode 8 beta 2 - try imageData.write(to: self.url, options:.atomic) + try imageData.write(to: self.url, options: .atomic) // try imageData.write(to: self.url, options:NSData.WritingOptions.dataWritingAtomic) } catch { // TODO: Handle this better @@ -39,8 +39,8 @@ public class PictureOutput: ImageConsumer { } // TODO: Replace with texture caches and a safer capture routine - func cgImageFromFramebuffer(_ framebuffer:Framebuffer) -> CGImage { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) + func cgImageFromFramebuffer(_ framebuffer: Framebuffer) -> 
CGImage { + let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.transparent) @@ -48,26 +48,26 @@ public class PictureOutput: ImageConsumer { // Need the blending here to enable non-1.0 alpha on output image enableAdditiveBlending() - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) disableBlending() framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) - let data = UnsafeMutablePointer.allocate(capacity:imageByteSize) + let data = UnsafeMutablePointer.allocate(capacity: imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() - guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not create CGDataProvider")} + guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else { fatalError("Could not create CGDataProvider") } let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, 
bitmapInfo:CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! + return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: 4 * Int(framebuffer.size.width), space: defaultRGBColorSpace, bitmapInfo: CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider: dataProvider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)! } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if let imageCallback = imageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let image = NSImage(cgImage:cgImageFromBytes, size:NSZeroSize) + let image = NSImage(cgImage: cgImageFromBytes, size: NSSize.zero) imageCallback(image) @@ -78,11 +78,11 @@ public class PictureOutput: ImageConsumer { if let imageCallback = encodedImageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let bitmapRepresentation = NSBitmapImageRep(cgImage:cgImageFromBytes) - let imageData:Data + let bitmapRepresentation = NSBitmapImageRep(cgImage: cgImageFromBytes) + let imageData: Data switch encodedImageFormat { - case .png: imageData = bitmapRepresentation.representation(using: .PNG, properties: ["":""])! - case .jpeg: imageData = bitmapRepresentation.representation(using: .JPEG, properties: ["":""])! + case .png: imageData = bitmapRepresentation.representation(using: .PNG, properties: ["": ""])! + case .jpeg: imageData = bitmapRepresentation.representation(using: .JPEG, properties: ["": ""])! 
} imageCallback(imageData) @@ -95,37 +95,37 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { let pictureOutput = PictureOutput() - pictureOutput.saveNextFrameToURL(url, format:format) + pictureOutput.saveNextFrameToURL(url, format: format) self --> pictureOutput } } public extension NSImage { - public func filterWithOperation(_ operation:T) -> NSImage { - return filterWithPipeline{input, output in + public func filterWithOperation(_ operation: T) -> NSImage { + return filterWithPipeline {input, output in input --> operation --> output } } - public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> NSImage { - let picture = PictureInput(image:self) - var outputImage:NSImage? + public func filterWithPipeline(_ pipeline: (PictureInput, PictureOutput) -> Void) -> NSImage { + let picture = PictureInput(image: self) + var outputImage: NSImage? let pictureOutput = PictureOutput() pictureOutput.onlyCaptureNextFrame = true pictureOutput.imageAvailableCallback = {image in outputImage = image } pipeline(picture, pictureOutput) - picture.processImage(synchronously:true) + picture.processImage(synchronously: true) return outputImage! } } // Why are these flipped in the callback definition? 
-func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) { +func dataProviderReleaseCallback(_ context: UnsafeMutableRawPointer?, data: UnsafeRawPointer, size: Int) { // UnsafeMutablePointer(data).deallocate(capacity:size) // FIXME: Verify this is correct - data.deallocate(bytes:size, alignedTo:1) + data.deallocate(bytes: size, alignedTo: 1) } diff --git a/framework/Source/Mac/RenderView.swift b/framework/Source/Mac/RenderView.swift index ddcb502f..d48d1d6e 100755 --- a/framework/Source/Mac/RenderView.swift +++ b/framework/Source/Mac/RenderView.swift @@ -1,13 +1,13 @@ import Cocoa -public class RenderView:NSOpenGLView, ImageConsumer { +public class RenderView: NSOpenGLView, ImageConsumer { public var backgroundColor = Color.black public var fillMode = FillMode.preserveAspectRatio - public var sizeInPixels:Size { get { return Size(width:Float(self.frame.size.width), height:Float(self.frame.size.width)) } } + public var sizeInPixels: Size { get { return Size(width: Float(self.frame.size.width), height: Float(self.frame.size.width)) } } public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - private lazy var displayShader:ShaderProgram = { + public let maximumInputs: UInt = 1 + private lazy var displayShader: ShaderProgram = { sharedImageProcessingContext.makeCurrentContext() self.openGLContext = sharedImageProcessingContext.context return sharedImageProcessingContext.passthroughShader @@ -15,18 +15,18 @@ public class RenderView:NSOpenGLView, ImageConsumer { // TODO: Need to set viewport to appropriate size, resize viewport on view reshape - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) - let viewSize = GLSize(width:GLint(round(self.bounds.size.width)), 
height:GLint(round(self.bounds.size.height))) + let viewSize = GLSize(width: GLint(round(self.bounds.size.width)), height: GLint(round(self.bounds.size.height))) glViewport(0, 0, viewSize.width, viewSize.height) clearFramebufferWithColor(backgroundColor) // TODO: Cache these scaled vertices - let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(.portrait), toFitSize:viewSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.portrait)]) + let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize: framebuffer.sizeForTargetOrientation(.portrait), toFitSize: viewSize) + renderQuadWithShader(self.displayShader, vertices: scaledVertices, inputTextures: [framebuffer.texturePropertiesForTargetOrientation(.portrait)]) sharedImageProcessingContext.presentBufferForDisplay() framebuffer.unlock() diff --git a/framework/Source/Matrix.swift b/framework/Source/Matrix.swift index ed563fdf..199d2acc 100644 --- a/framework/Source/Matrix.swift +++ b/framework/Source/Matrix.swift @@ -3,12 +3,12 @@ import QuartzCore #endif public struct Matrix4x4 { - public let m11:Float, m12:Float, m13:Float, m14:Float - public let m21:Float, m22:Float, m23:Float, m24:Float - public let m31:Float, m32:Float, m33:Float, m34:Float - public let m41:Float, m42:Float, m43:Float, m44:Float + public let m11: Float, m12: Float, m13: Float, m14: Float + public let m21: Float, m22: Float, m23: Float, m24: Float + public let m31: Float, m32: Float, m33: Float, m34: Float + public let m41: Float, m42: Float, m43: Float, m44: Float - public init(rowMajorValues:[Float]) { + public init(rowMajorValues: [Float]) { guard rowMajorValues.count > 15 else { fatalError("Tried to initialize a 4x4 matrix with fewer than 16 values") } self.m11 = rowMajorValues[0] @@ -32,18 +32,18 @@ public struct Matrix4x4 { self.m44 = 
rowMajorValues[15] } - public static let identity = Matrix4x4(rowMajorValues:[1.0, 0.0, 0.0, 0.0, + public static let identity = Matrix4x4(rowMajorValues: [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]) } public struct Matrix3x3 { - public let m11:Float, m12:Float, m13:Float - public let m21:Float, m22:Float, m23:Float - public let m31:Float, m32:Float, m33:Float + public let m11: Float, m12: Float, m13: Float + public let m21: Float, m22: Float, m23: Float + public let m31: Float, m32: Float, m33: Float - public init(rowMajorValues:[Float]) { + public init(rowMajorValues: [Float]) { guard rowMajorValues.count > 8 else { fatalError("Tried to initialize a 3x3 matrix with fewer than 9 values") } self.m11 = rowMajorValues[0] @@ -59,16 +59,16 @@ public struct Matrix3x3 { self.m33 = rowMajorValues[8] } - public static let identity = Matrix3x3(rowMajorValues:[1.0, 0.0, 0.0, + public static let identity = Matrix3x3(rowMajorValues: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]) - public static let centerOnly = Matrix3x3(rowMajorValues:[0.0, 0.0, 0.0, + public static let centerOnly = Matrix3x3(rowMajorValues: [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]) } -func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near:Float, far:Float, anchorTopLeft:Bool = false) -> Matrix4x4 { +func orthographicMatrix(_ left: Float, right: Float, bottom: Float, top: Float, near: Float, far: Float, anchorTopLeft: Bool = false) -> Matrix4x4 { let r_l = right - left let t_b = top - bottom let f_n = far - near @@ -76,8 +76,8 @@ func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near var ty = -(top + bottom) / (top - bottom) let tz = -(far + near) / (far - near) - let scale:Float - if (anchorTopLeft) { + let scale: Float + if anchorTopLeft { scale = 4.0 tx = -1.0 ty = -1.0 @@ -85,17 +85,16 @@ func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near scale = 2.0 } - return 
Matrix4x4(rowMajorValues:[ + return Matrix4x4(rowMajorValues: [ scale / r_l, 0.0, 0.0, tx, 0.0, scale / t_b, 0.0, ty, 0.0, 0.0, scale / f_n, tz, 0.0, 0.0, 0.0, 1.0]) } - #if !os(Linux) public extension Matrix4x4 { - init (_ transform3D:CATransform3D) { + init (_ transform3D: CATransform3D) { self.m11 = Float(transform3D.m11) self.m12 = Float(transform3D.m12) self.m13 = Float(transform3D.m13) @@ -117,7 +116,7 @@ public extension Matrix4x4 { self.m44 = Float(transform3D.m44) } - init (_ transform:CGAffineTransform) { + init (_ transform: CGAffineTransform) { self.init(CATransform3DMakeAffineTransform(transform)) } } diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index e7f9d5ae..a68dd1dd 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -1,4 +1,3 @@ - #if os(Linux) #if GLES import COpenGLES.gles2 @@ -18,14 +17,14 @@ import Foundation public let sharedImageProcessingContext = OpenGLContext() extension OpenGLContext { - public func programForVertexShader(_ vertexShader:String, fragmentShader:String) throws -> ShaderProgram { - return try self.runOperationSynchronously{ + public func programForVertexShader(_ vertexShader: String, fragmentShader: String) throws -> ShaderProgram { + return try self.runOperationSynchronously { let lookupKeyForShaderProgram = "V: \(vertexShader) - F: \(fragmentShader)" if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { // debugPrint("load from cache: \(lookupKeyForShaderProgram)") return shaderFromCache } else { - let program = try ShaderProgram(vertexShader:vertexShader, fragmentShader:fragmentShader) + let program = try ShaderProgram(vertexShader: vertexShader, fragmentShader: fragmentShader) self.shaderCache[lookupKeyForShaderProgram] = program // debugPrint("create cache: \(lookupKeyForShaderProgram)") return program @@ -33,24 +32,24 @@ extension OpenGLContext { } } - public func programForVertexShader(_ 
vertexShader:String, fragmentShader:URL) throws -> ShaderProgram { - return try programForVertexShader(vertexShader, fragmentShader:try shaderFromFile(fragmentShader)) + public func programForVertexShader(_ vertexShader: String, fragmentShader: URL) throws -> ShaderProgram { + return try programForVertexShader(vertexShader, fragmentShader: try shaderFromFile(fragmentShader)) } - public func programForVertexShader(_ vertexShader:URL, fragmentShader:URL) throws -> ShaderProgram { - return try programForVertexShader(try shaderFromFile(vertexShader), fragmentShader:try shaderFromFile(fragmentShader)) + public func programForVertexShader(_ vertexShader: URL, fragmentShader: URL) throws -> ShaderProgram { + return try programForVertexShader(try shaderFromFile(vertexShader), fragmentShader: try shaderFromFile(fragmentShader)) } - public func openGLDeviceSettingForOption(_ option:Int32) -> GLint { - return self.runOperationSynchronously{() -> GLint in + public func openGLDeviceSettingForOption(_ option: Int32) -> GLint { + return self.runOperationSynchronously {() -> GLint in self.makeCurrentContext() - var openGLValue:GLint = 0 + var openGLValue: GLint = 0 glGetIntegerv(GLenum(option), &openGLValue) return openGLValue } } - public func deviceSupportsExtension(_ openGLExtension:String) -> Bool { + public func deviceSupportsExtension(_ openGLExtension: String) -> Bool { #if os(Linux) return false #else @@ -68,51 +67,49 @@ extension OpenGLContext { return deviceSupportsExtension("GL_EXT_shader_framebuffer_fetch") } - public func sizeThatFitsWithinATextureForSize(_ size:Size) -> Size { + public func sizeThatFitsWithinATextureForSize(_ size: Size) -> Size { let maxTextureSize = Float(self.maximumTextureSizeForThisDevice) - if ( (size.width < maxTextureSize) && (size.height < maxTextureSize) ) { + if (size.width < maxTextureSize) && (size.height < maxTextureSize) { return size } - let adjustedSize:Size - if (size.width > size.height) { - adjustedSize = 
Size(width:maxTextureSize, height:(maxTextureSize / size.width) * size.height) + let adjustedSize: Size + if size.width > size.height { + adjustedSize = Size(width: maxTextureSize, height: (maxTextureSize / size.width) * size.height) } else { - adjustedSize = Size(width:(maxTextureSize / size.height) * size.width, height:maxTextureSize) + adjustedSize = Size(width: (maxTextureSize / size.height) * size.width, height: maxTextureSize) } return adjustedSize } func generateTextureVBOs() { - textureVBOs[.noRotation] = generateVBO(for:Rotation.noRotation.textureCoordinates()) - textureVBOs[.rotateCounterclockwise] = generateVBO(for:Rotation.rotateCounterclockwise.textureCoordinates()) - textureVBOs[.rotateClockwise] = generateVBO(for:Rotation.rotateClockwise.textureCoordinates()) - textureVBOs[.rotate180] = generateVBO(for:Rotation.rotate180.textureCoordinates()) - textureVBOs[.flipHorizontally] = generateVBO(for:Rotation.flipHorizontally.textureCoordinates()) - textureVBOs[.flipVertically] = generateVBO(for:Rotation.flipVertically.textureCoordinates()) - textureVBOs[.rotateClockwiseAndFlipVertically] = generateVBO(for:Rotation.rotateClockwiseAndFlipVertically.textureCoordinates()) - textureVBOs[.rotateClockwiseAndFlipHorizontally] = generateVBO(for:Rotation.rotateClockwiseAndFlipHorizontally.textureCoordinates()) + textureVBOs[.noRotation] = generateVBO(for: Rotation.noRotation.textureCoordinates()) + textureVBOs[.rotateCounterclockwise] = generateVBO(for: Rotation.rotateCounterclockwise.textureCoordinates()) + textureVBOs[.rotateClockwise] = generateVBO(for: Rotation.rotateClockwise.textureCoordinates()) + textureVBOs[.rotate180] = generateVBO(for: Rotation.rotate180.textureCoordinates()) + textureVBOs[.flipHorizontally] = generateVBO(for: Rotation.flipHorizontally.textureCoordinates()) + textureVBOs[.flipVertically] = generateVBO(for: Rotation.flipVertically.textureCoordinates()) + textureVBOs[.rotateClockwiseAndFlipVertically] = generateVBO(for: 
Rotation.rotateClockwiseAndFlipVertically.textureCoordinates()) + textureVBOs[.rotateClockwiseAndFlipHorizontally] = generateVBO(for: Rotation.rotateClockwiseAndFlipHorizontally.textureCoordinates()) } - public func textureVBO(for rotation:Rotation) -> GLuint { - guard let textureVBO = textureVBOs[rotation] else {fatalError("GPUImage doesn't have a texture VBO set for the rotation \(rotation)") } + public func textureVBO(for rotation: Rotation) -> GLuint { + guard let textureVBO = textureVBOs[rotation] else { fatalError("GPUImage doesn't have a texture VBO set for the rotation \(rotation)") } return textureVBO } } -public var GPUImageLogger: (String, StaticString, UInt, StaticString) -> () = { stringToPrint, file, line, function in - Swift.print("\(stringToPrint) --> \((String(describing:file) as NSString).lastPathComponent): \(function): \(line)") +public var GPUImageLogger: (String, StaticString, UInt, StaticString) -> Void = { stringToPrint, file, line, function in + Swift.print("\(stringToPrint) --> \((String(describing: file) as NSString).lastPathComponent): \(function): \(line)") } -@_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { +@_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint: String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { #if DEBUG print("[GPUImage] " + stringToPrint, file: file, line: line, function: function) #endif } -@_semantics("sil.optimize.never") public func print(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { +@_semantics("sil.optimize.never") public func print(_ stringToPrint: String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { GPUImageLogger(stringToPrint, file, line, function) } - - diff --git a/framework/Source/OpenGLRendering.swift 
b/framework/Source/OpenGLRendering.swift index ba55c790..aa3ac8c4 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -23,10 +23,10 @@ public enum InputTextureStorageFormat { } public struct InputTextureProperties { - public let textureStorage:InputTextureStorageFormat - public let texture:GLuint + public let textureStorage: InputTextureStorageFormat + public let texture: GLuint - public init(textureCoordinates:[GLfloat]? = nil, textureVBO:GLuint? = nil, texture:GLuint) { + public init(textureCoordinates: [GLfloat]? = nil, textureVBO: GLuint? = nil, texture: GLuint) { self.texture = texture switch (textureCoordinates, textureVBO) { case let (.some(coordinates), .none): self.textureStorage = .textureCoordinates(coordinates) @@ -38,32 +38,32 @@ public struct InputTextureProperties { } public struct GLSize { - public let width:GLint - public let height:GLint + public let width: GLint + public let height: GLint - public init(width:GLint, height:GLint) { + public init(width: GLint, height: GLint) { self.width = width self.height = height } - public init(_ size:Size) { + public init(_ size: Size) { self.width = size.glWidth() self.height = size.glHeight() } } extension Size { - init(_ size:GLSize) { + init(_ size: GLSize) { self.width = Float(size.width) self.height = Float(size.height) } } -public let standardImageVertices:[GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0] -public let verticallyInvertedImageVertices:[GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] +public let standardImageVertices: [GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0] +public let verticallyInvertedImageVertices: [GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] // "position" and "inputTextureCoordinate", "inputTextureCoordinate2" attribute naming follows the convention of the old GPUImage -public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat]? 
= nil, vertexBufferObject:GLuint? = nil, inputTextures:[InputTextureProperties], context: OpenGLContext = sharedImageProcessingContext) { +public func renderQuadWithShader(_ shader: ShaderProgram, uniformSettings: ShaderUniformSettings? = nil, vertices: [GLfloat]? = nil, vertexBufferObject: GLuint? = nil, inputTextures: [InputTextureProperties], context: OpenGLContext = sharedImageProcessingContext) { switch (vertices, vertexBufferObject) { case (.none, .some): break case (.some, .none): break @@ -77,7 +77,6 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU guard let positionAttribute = shader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } - if let boundVBO = vertexBufferObject { glBindBuffer(GLenum(GL_ARRAY_BUFFER), boundVBO) glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, nil) @@ -96,19 +95,19 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU glVertexAttribPointer(textureCoordinateAttribute, 2, GLenum(GL_FLOAT), 0, 0, nil) glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0) } - } else if (index == 0) { + } else if index == 0 { fatalError("The required attribute named inputTextureCoordinate was missing from the shader program during rendering.") } glActiveTexture(textureUnitForIndex(index)) glBindTexture(GLenum(GL_TEXTURE_2D), inputTexture.texture) - shader.setValue(GLint(index), forUniform:"inputImageTexture".withNonZeroSuffix(index)) + shader.setValue(GLint(index), forUniform: "inputImageTexture".withNonZeroSuffix(index)) } glDrawArrays(GLenum(GL_TRIANGLE_STRIP), 0, 4) - if (vertexBufferObject != nil) { + if vertexBufferObject != nil { glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0) } @@ -118,16 +117,16 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU } } -public func clearFramebufferWithColor(_ color:Color) { +public func clearFramebufferWithColor(_ color: Color) { 
glClearColor(GLfloat(color.redComponent), GLfloat(color.greenComponent), GLfloat(color.blueComponent), GLfloat(color.alphaComponent)) glClear(GLenum(GL_COLOR_BUFFER_BIT)) } -func renderStencilMaskFromFramebuffer(_ framebuffer:Framebuffer) { +func renderStencilMaskFromFramebuffer(_ framebuffer: Framebuffer) { let inputTextureProperties = framebuffer.texturePropertiesForOutputRotation(.noRotation) glEnable(GLenum(GL_STENCIL_TEST)) glClearStencil(0) - glClear (GLenum(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT)) + glClear(GLenum(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT)) glColorMask(GLboolean(GL_FALSE), GLboolean(GL_FALSE), GLboolean(GL_FALSE), GLboolean(GL_FALSE)) glStencilFunc(GLenum(GL_ALWAYS), 1, 1) glStencilOp(GLenum(GL_KEEP), GLenum(GL_KEEP), GLenum(GL_REPLACE)) @@ -135,10 +134,10 @@ func renderStencilMaskFromFramebuffer(_ framebuffer:Framebuffer) { #if GL glEnable(GLenum(GL_ALPHA_TEST)) glAlphaFunc(GLenum(GL_NOTEQUAL), 0.0) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, vertices:standardImageVertices, inputTextures:[inputTextureProperties]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, vertices: standardImageVertices, inputTextures: [inputTextureProperties]) #else - let alphaTestShader = crashOnShaderCompileFailure("Stencil"){return try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader:AlphaTestFragmentShader)} - renderQuadWithShader(alphaTestShader, vertices:standardImageVertices, inputTextures:[inputTextureProperties]) + let alphaTestShader = crashOnShaderCompileFailure("Stencil") { return try sharedImageProcessingContext.programForVertexShader(OneInputVertexShader, fragmentShader: AlphaTestFragmentShader) } + renderQuadWithShader(alphaTestShader, vertices: standardImageVertices, inputTextures: [inputTextureProperties]) #endif glColorMask(GLboolean(GL_TRUE), GLboolean(GL_TRUE), GLboolean(GL_TRUE), GLboolean(GL_TRUE)) @@ -155,7 +154,7 @@ func disableStencil() { 
glDisable(GLenum(GL_STENCIL_TEST)) } -func textureUnitForIndex(_ index:Int) -> GLenum { +func textureUnitForIndex(_ index: Int) -> GLenum { switch index { case 0: return GLenum(GL_TEXTURE0) case 1: return GLenum(GL_TEXTURE1) @@ -170,8 +169,8 @@ func textureUnitForIndex(_ index:Int) -> GLenum { } } -public func generateTexture(minFilter:Int32, magFilter:Int32, wrapS:Int32, wrapT:Int32) -> GLuint { - var texture:GLuint = 0 +public func generateTexture(minFilter: Int32, magFilter: Int32, wrapS: Int32, wrapT: Int32) -> GLuint { + var texture: GLuint = 0 glActiveTexture(GLenum(GL_TEXTURE1)) glGenTextures(1, &texture) @@ -186,15 +185,15 @@ public func generateTexture(minFilter:Int32, magFilter:Int32, wrapS:Int32, wrapT return texture } -public func uploadLocalArray(data:[GLfloat], into texture:GLuint, size:GLSize) { +public func uploadLocalArray(data: [GLfloat], into texture: GLuint, size: GLSize) { glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, size.width, size.height, 0, GLenum(GL_RGBA), GLenum(GL_FLOAT), data) glBindTexture(GLenum(GL_TEXTURE_2D), 0) } -func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, internalFormat:Int32, format:Int32, type:Int32, stencil:Bool) throws -> (GLuint, GLuint?) { - var framebuffer:GLuint = 0 +func generateFramebufferForTexture(_ texture: GLuint, width: GLint, height: GLint, internalFormat: Int32, format: Int32, type: Int32, stencil: Bool) throws -> (GLuint, GLuint?) 
{ + var framebuffer: GLuint = 0 glActiveTexture(GLenum(GL_TEXTURE1)) glGenFramebuffers(1, &framebuffer) @@ -205,13 +204,13 @@ func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, glFramebufferTexture2D(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_TEXTURE_2D), texture, 0) let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) - if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - throw FramebufferCreationError(errorCode:status) + if status != GLenum(GL_FRAMEBUFFER_COMPLETE) { + throw FramebufferCreationError(errorCode: status) } - let stencilBuffer:GLuint? + let stencilBuffer: GLuint? if stencil { - stencilBuffer = try attachStencilBuffer(width:width, height:height) + stencilBuffer = try attachStencilBuffer(width: width, height: height) } else { stencilBuffer = nil } @@ -221,9 +220,9 @@ func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, return (framebuffer, stencilBuffer) } -func attachStencilBuffer(width:GLint, height:GLint) throws -> GLuint { - var stencilBuffer:GLuint = 0 - glGenRenderbuffers(1, &stencilBuffer); +func attachStencilBuffer(width: GLint, height: GLint) throws -> GLuint { + var stencilBuffer: GLuint = 0 + glGenRenderbuffers(1, &stencilBuffer) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), stencilBuffer) glRenderbufferStorage(GLenum(GL_RENDERBUFFER), GLenum(GL_DEPTH24_STENCIL8), width, height) // iOS seems to only support combination depth + stencil, from references #if os(iOS) @@ -234,8 +233,8 @@ func attachStencilBuffer(width:GLint, height:GLint) throws -> GLuint { glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) - if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - throw FramebufferCreationError(errorCode:status) + if status != GLenum(GL_FRAMEBUFFER_COMPLETE) { + throw FramebufferCreationError(errorCode: status) } return stencilBuffer @@ -251,8 +250,8 @@ public func disableBlending() { glDisable(GLenum(GL_BLEND)) } 
-public func generateVBO(for vertices:[GLfloat]) -> GLuint { - var newBuffer:GLuint = 0 +public func generateVBO(for vertices: [GLfloat]) -> GLuint { + var newBuffer: GLuint = 0 glGenBuffers(1, &newBuffer) glBindBuffer(GLenum(GL_ARRAY_BUFFER), newBuffer) glBufferData(GLenum(GL_ARRAY_BUFFER), MemoryLayout.size * vertices.count, vertices, GLenum(GL_STATIC_DRAW)) @@ -260,13 +259,13 @@ public func generateVBO(for vertices:[GLfloat]) -> GLuint { return newBuffer } -public func deleteVBO(_ vbo:GLuint) { +public func deleteVBO(_ vbo: GLuint) { var deletedVBO = vbo glDeleteBuffers(1, &deletedVBO) } extension String { - func withNonZeroSuffix(_ suffix:Int) -> String { + func withNonZeroSuffix(_ suffix: Int) -> String { if suffix == 0 { return self } else { @@ -274,12 +273,11 @@ extension String { } } - func withGLChar(_ operation:(UnsafePointer) -> ()) { - if let value = self.cString(using:String.Encoding.utf8) { + func withGLChar(_ operation: (UnsafePointer) -> Void) { + if let value = self.cString(using: String.Encoding.utf8) { operation(UnsafePointer(value)) } else { fatalError("Could not convert this string to UTF8: \(self)") } } } - diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index b5ffcaae..791a9b1d 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -2,22 +2,22 @@ open class OperationGroup: ImageProcessingOperation { public let inputImageRelay = ImageRelay() public let outputImageRelay = ImageRelay() - public var sources:SourceContainer { get { return inputImageRelay.sources } } - public var targets:TargetContainer { get { return outputImageRelay.targets } } - public let maximumInputs:UInt = 1 + public var sources: SourceContainer { get { return inputImageRelay.sources } } + public var targets: TargetContainer { get { return outputImageRelay.targets } } + public let maximumInputs: UInt = 1 public init() { } - open func newFramebufferAvailable(_ framebuffer:Framebuffer, 
fromSourceIndex:UInt) { - inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex:fromSourceIndex) + open func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex: fromSourceIndex) } - public func configureGroup(_ configurationOperation:(_ input:ImageRelay, _ output:ImageRelay) -> ()) { + public func configureGroup(_ configurationOperation:(_ input: ImageRelay, _ output: ImageRelay) -> Void) { configurationOperation(inputImageRelay, outputImageRelay) } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { - outputImageRelay.transmitPreviousImage(to:target, atIndex:atIndex) + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { + outputImageRelay.transmitPreviousImage(to: target, atIndex: atIndex) } } diff --git a/framework/Source/Operations/AdaptiveThreshold.swift b/framework/Source/Operations/AdaptiveThreshold.swift index 2bac7c9d..827a835a 100644 --- a/framework/Source/Operations/AdaptiveThreshold.swift +++ b/framework/Source/Operations/AdaptiveThreshold.swift @@ -3,15 +3,15 @@ public class AdaptiveThreshold: OperationGroup { let luminance = Luminance() let boxBlur = BoxBlur() - let adaptiveThreshold = BasicOperation(fragmentShader:AdaptiveThresholdFragmentShader, numberOfInputs:2) + let adaptiveThreshold = BasicOperation(fragmentShader: AdaptiveThresholdFragmentShader, numberOfInputs: 2) public override init() { blurRadiusInPixels = 4.0 super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.luminance --> self.boxBlur --> self.adaptiveThreshold --> output self.luminance --> self.adaptiveThreshold } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AddBlend.swift b/framework/Source/Operations/AddBlend.swift index 329537a9..dd06f470 100644 --- a/framework/Source/Operations/AddBlend.swift +++ 
b/framework/Source/Operations/AddBlend.swift @@ -1,6 +1,5 @@ public class AddBlend: BasicOperation { - public init() { - super.init(fragmentShader:AddBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: AddBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AlphaBlend.swift b/framework/Source/Operations/AlphaBlend.swift index c3931988..471985b6 100644 --- a/framework/Source/Operations/AlphaBlend.swift +++ b/framework/Source/Operations/AlphaBlend.swift @@ -1,9 +1,9 @@ public class AlphaBlend: BasicOperation { - public var mix:Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } + public var mix: Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } public init() { - super.init(fragmentShader:AlphaBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: AlphaBlendFragmentShader, numberOfInputs: 2) - ({mix = 0.5})() + ({ mix = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AmatorkaFilter.swift b/framework/Source/Operations/AmatorkaFilter.swift index 2a6d507e..6296a3c4 100755 --- a/framework/Source/Operations/AmatorkaFilter.swift +++ b/framework/Source/Operations/AmatorkaFilter.swift @@ -12,12 +12,11 @@ public class AmatorkaFilter: LookupFilter { super.init() do { - try ({lookupImage = try PictureInput(imageName:"lookup_amatorka.png")})() - } - catch { + try ({ lookupImage = try PictureInput(imageName: "lookup_amatorka.png") })() + } catch { print("ERROR: Unable to create PictureInput \(error)") } - ({intensity = 1.0})() + ({ intensity = 1.0 })() } } #endif diff --git a/framework/Source/Operations/AverageColorExtractor.swift b/framework/Source/Operations/AverageColorExtractor.swift index ccf86608..ad3c3779 100755 --- a/framework/Source/Operations/AverageColorExtractor.swift +++ b/framework/Source/Operations/AverageColorExtractor.swift @@ -16,19 +16,19 @@ import Glibc import Foundation public class 
AverageColorExtractor: BasicOperation { - public var extractedColorCallback:((Color) -> ())? + public var extractedColorCallback: ((Color) -> Void)? public init() { - super.init(vertexShader:AverageColorVertexShader, fragmentShader:AverageColorFragmentShader) + super.init(vertexShader: AverageColorVertexShader, fragmentShader: AverageColorFragmentShader) } override open func renderFrame() { - averageColorBySequentialReduction(inputFramebuffer:inputFramebuffers[0]!, shader:shader, extractAverageOperation:extractAverageColorFromFramebuffer) + averageColorBySequentialReduction(inputFramebuffer: inputFramebuffers[0]!, shader: shader, extractAverageOperation: extractAverageColorFromFramebuffer) releaseIncomingFramebuffers() } - func extractAverageColorFromFramebuffer(_ framebuffer:Framebuffer) { - var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) + func extractAverageColorFromFramebuffer(_ framebuffer: Framebuffer) { + var data = [UInt8](repeating: 0, count: Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer = framebuffer framebuffer.resetRetainCount() @@ -43,13 +43,13 @@ public class AverageColorExtractor: BasicOperation { alphaTotal += Int(data[(currentPixel * 4) + 3]) } - let returnColor = Color(red:Float(redTotal) / Float(totalNumberOfPixels) / 255.0, green:Float(greenTotal) / Float(totalNumberOfPixels) / 255.0, blue:Float(blueTotal) / Float(totalNumberOfPixels) / 255.0, alpha:Float(alphaTotal) / Float(totalNumberOfPixels) / 255.0) + let returnColor = Color(red: Float(redTotal) / Float(totalNumberOfPixels) / 255.0, green: Float(greenTotal) / Float(totalNumberOfPixels) / 255.0, blue: Float(blueTotal) / Float(totalNumberOfPixels) / 255.0, alpha: Float(alphaTotal) / Float(totalNumberOfPixels) / 255.0) extractedColorCallback?(returnColor) } } -func 
averageColorBySequentialReduction(inputFramebuffer:Framebuffer, shader:ShaderProgram, extractAverageOperation:(Framebuffer) -> ()) { +func averageColorBySequentialReduction(inputFramebuffer: Framebuffer, shader: ShaderProgram, extractAverageOperation: (Framebuffer) -> Void) { var uniformSettings = ShaderUniformSettings() let inputSize = Size(inputFramebuffer.size) let numberOfReductionsInX = floor(log(Double(inputSize.width)) / log(4.0)) @@ -58,14 +58,14 @@ func averageColorBySequentialReduction(inputFramebuffer:Framebuffer, shader:Shad inputFramebuffer.lock() var previousFramebuffer = inputFramebuffer for currentReduction in 0.. ())? + public var extractedLuminanceCallback: ((Float) -> Void)? public init() { - super.init(vertexShader:AverageColorVertexShader, fragmentShader:AverageLuminanceFragmentShader) + super.init(vertexShader: AverageColorVertexShader, fragmentShader: AverageLuminanceFragmentShader) } override open func renderFrame() { // Reduce to luminance before passing into the downsampling // TODO: Combine this with the first stage of the downsampling by doing reduction here - let luminancePassShader = crashOnShaderCompileFailure("AverageLuminance"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:LuminanceFragmentShader)} - let luminancePassFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:inputFramebuffers[0]!.orientation, size:inputFramebuffers[0]!.size) + let luminancePassShader = crashOnShaderCompileFailure("AverageLuminance") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader: LuminanceFragmentShader) } + let luminancePassFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: inputFramebuffers[0]!.orientation, size: inputFramebuffers[0]!.size) luminancePassFramebuffer.activateFramebufferForRendering() - 
renderQuadWithShader(luminancePassShader, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[inputFramebuffers[0]!.texturePropertiesForTargetOrientation(luminancePassFramebuffer.orientation)]) + renderQuadWithShader(luminancePassShader, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [inputFramebuffers[0]!.texturePropertiesForTargetOrientation(luminancePassFramebuffer.orientation)]) - averageColorBySequentialReduction(inputFramebuffer:luminancePassFramebuffer, shader:shader, extractAverageOperation:extractAverageLuminanceFromFramebuffer) + averageColorBySequentialReduction(inputFramebuffer: luminancePassFramebuffer, shader: shader, extractAverageOperation: extractAverageLuminanceFromFramebuffer) releaseIncomingFramebuffers() } - func extractAverageLuminanceFromFramebuffer(_ framebuffer:Framebuffer) { - var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) + func extractAverageLuminanceFromFramebuffer(_ framebuffer: Framebuffer) { + var data = [UInt8](repeating: 0, count: Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer = framebuffer framebuffer.resetRetainCount() diff --git a/framework/Source/Operations/AverageLuminanceThreshold.swift b/framework/Source/Operations/AverageLuminanceThreshold.swift index 8e086c5f..fbf2d76c 100644 --- a/framework/Source/Operations/AverageLuminanceThreshold.swift +++ b/framework/Source/Operations/AverageLuminanceThreshold.swift @@ -1,5 +1,5 @@ public class AverageLuminanceThreshold: OperationGroup { - public var thresholdMultiplier:Float = 1.0 + public var thresholdMultiplier: Float = 1.0 let averageLuminance = AverageLuminanceExtractor() let luminanceThreshold = LuminanceThreshold() @@ -11,9 +11,9 @@ public class AverageLuminanceThreshold: OperationGroup { 
self?.luminanceThreshold.threshold = (self?.thresholdMultiplier ?? 1.0) * luminance } - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.averageLuminance input --> self.luminanceThreshold --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/BilateralBlur.swift b/framework/Source/Operations/BilateralBlur.swift index ca47c9d7..2f97d601 100644 --- a/framework/Source/Operations/BilateralBlur.swift +++ b/framework/Source/Operations/BilateralBlur.swift @@ -1,12 +1,12 @@ // TODO: auto-generate shaders for this, per the Gaussian blur method public class BilateralBlur: TwoStageOperation { - public var distanceNormalizationFactor:Float = 8.0 { didSet { uniformSettings["distanceNormalizationFactor"] = distanceNormalizationFactor } } + public var distanceNormalizationFactor: Float = 8.0 { didSet { uniformSettings["distanceNormalizationFactor"] = distanceNormalizationFactor } } public init() { - super.init(vertexShader:BilateralBlurVertexShader, fragmentShader:BilateralBlurFragmentShader) + super.init(vertexShader: BilateralBlurVertexShader, fragmentShader: BilateralBlurFragmentShader) downsamplingFactor = 4.0 - ({distanceNormalizationFactor = 1.0})() + ({ distanceNormalizationFactor = 1.0 })() } } diff --git a/framework/Source/Operations/BoxBlur.swift b/framework/Source/Operations/BoxBlur.swift index 9e50a90a..d1eb3086 100755 --- a/framework/Source/Operations/BoxBlur.swift +++ b/framework/Source/Operations/BoxBlur.swift @@ -5,13 +5,13 @@ import Glibc import Foundation public class BoxBlur: TwoStageOperation { - public var blurRadiusInPixels:Float { + public var blurRadiusInPixels: Float { didSet { - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) 
sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("BoxBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader:fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius))} + self.shader = crashOnShaderCompileFailure("BoxBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader: fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius)) } } } } @@ -19,13 +19,13 @@ public class BoxBlur: TwoStageOperation { public init() { blurRadiusInPixels = 2.0 let pixelRadius = UInt(round(round(Double(blurRadiusInPixels) / 2.0) * 2.0)) - let initialShader = crashOnShaderCompileFailure("BoxBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader:fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius))} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("BoxBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader: fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius)) } + super.init(shader: initialShader, numberOfInputs: 1) } } -func vertexShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { - guard (radius > 0) else { return OneInputVertexShader } +func vertexShaderForOptimizedBoxBlurOfRadius(_ radius: UInt) -> String { + guard radius > 0 else { return OneInputVertexShader } let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) var shaderString = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(1 + (numberOfOptimizedOffsets * 2))];\n \n void main()\n {\n 
gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" @@ -40,8 +40,8 @@ func vertexShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { return shaderString } -func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius: UInt) -> String { + guard radius > 0 else { return PassthroughFragmentShader } let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) @@ -62,7 +62,7 @@ func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { } // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader - if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) { + if trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets { #if GLES shaderString += "highp vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" #else diff --git a/framework/Source/Operations/BrightnessAdjustment.swift b/framework/Source/Operations/BrightnessAdjustment.swift index c0c2ed03..e8547ba1 100644 --- a/framework/Source/Operations/BrightnessAdjustment.swift +++ b/framework/Source/Operations/BrightnessAdjustment.swift @@ -1,9 +1,9 @@ public class BrightnessAdjustment: BasicOperation { - public var brightness:Float = 0.0 { didSet { uniformSettings["brightness"] = brightness } } + public var brightness: Float = 0.0 { didSet { uniformSettings["brightness"] = brightness } } public init() { - super.init(fragmentShader:BrightnessFragmentShader, numberOfInputs:1) + super.init(fragmentShader: BrightnessFragmentShader, numberOfInputs: 1) - ({brightness = 1.0})() + ({ brightness = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/BulgeDistortion.swift b/framework/Source/Operations/BulgeDistortion.swift index adf3d2b2..8a577595 100644 
--- a/framework/Source/Operations/BulgeDistortion.swift +++ b/framework/Source/Operations/BulgeDistortion.swift @@ -1,13 +1,13 @@ public class BulgeDistortion: BasicOperation { - public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } - public var scale:Float = 0.5 { didSet { uniformSettings["scale"] = scale } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 0.25 { didSet { uniformSettings["radius"] = radius } } + public var scale: Float = 0.5 { didSet { uniformSettings["scale"] = scale } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:BulgeDistortionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: BulgeDistortionFragmentShader, numberOfInputs: 1) - ({radius = 0.25})() - ({scale = 0.5})() - ({center = Position.center})() + ({ radius = 0.25 })() + ({ scale = 0.5 })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/CGAColorspaceFilter.swift b/framework/Source/Operations/CGAColorspaceFilter.swift index 6cf16907..4ec6ac75 100644 --- a/framework/Source/Operations/CGAColorspaceFilter.swift +++ b/framework/Source/Operations/CGAColorspaceFilter.swift @@ -1,5 +1,5 @@ public class CGAColorspaceFilter: BasicOperation { public init() { - super.init(fragmentShader:CGAColorspaceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: CGAColorspaceFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/CannyEdgeDetection.swift b/framework/Source/Operations/CannyEdgeDetection.swift index fbe1bc48..b347f0d5 100644 --- a/framework/Source/Operations/CannyEdgeDetection.swift +++ b/framework/Source/Operations/CannyEdgeDetection.swift @@ -13,25 +13,25 @@ */ public class CannyEdgeDetection: OperationGroup { - public var blurRadiusInPixels:Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = 
blurRadiusInPixels } } - public var upperThreshold:Float = 0.4 { didSet { directionalNonMaximumSuppression.uniformSettings["upperThreshold"] = upperThreshold } } - public var lowerThreshold:Float = 0.1 { didSet { directionalNonMaximumSuppression.uniformSettings["lowerThreshold"] = lowerThreshold } } + public var blurRadiusInPixels: Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var upperThreshold: Float = 0.4 { didSet { directionalNonMaximumSuppression.uniformSettings["upperThreshold"] = upperThreshold } } + public var lowerThreshold: Float = 0.1 { didSet { directionalNonMaximumSuppression.uniformSettings["lowerThreshold"] = lowerThreshold } } let luminance = Luminance() let gaussianBlur = SingleComponentGaussianBlur() - let directionalSobel = TextureSamplingOperation(fragmentShader:DirectionalSobelEdgeDetectionFragmentShader) - let directionalNonMaximumSuppression = TextureSamplingOperation(vertexShader:OneInputVertexShader, fragmentShader:DirectionalNonMaximumSuppressionFragmentShader) - let weakPixelInclusion = TextureSamplingOperation(fragmentShader:WeakPixelInclusionFragmentShader) + let directionalSobel = TextureSamplingOperation(fragmentShader: DirectionalSobelEdgeDetectionFragmentShader) + let directionalNonMaximumSuppression = TextureSamplingOperation(vertexShader: OneInputVertexShader, fragmentShader: DirectionalNonMaximumSuppressionFragmentShader) + let weakPixelInclusion = TextureSamplingOperation(fragmentShader: WeakPixelInclusionFragmentShader) public override init() { super.init() - ({blurRadiusInPixels = 2.0})() - ({upperThreshold = 0.4})() - ({lowerThreshold = 0.1})() + ({ blurRadiusInPixels = 2.0 })() + ({ upperThreshold = 0.4 })() + ({ lowerThreshold = 0.1 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.luminance --> self.gaussianBlur --> self.directionalSobel --> self.directionalNonMaximumSuppression --> self.weakPixelInclusion --> output } } -} \ No 
newline at end of file +} diff --git a/framework/Source/Operations/ChromaKeyBlend.swift b/framework/Source/Operations/ChromaKeyBlend.swift index 940ad0cf..f4505c6e 100644 --- a/framework/Source/Operations/ChromaKeyBlend.swift +++ b/framework/Source/Operations/ChromaKeyBlend.swift @@ -1,13 +1,13 @@ public class ChromaKeyBlend: BasicOperation { - public var thresholdSensitivity:Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } - public var smoothing:Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } - public var colorToReplace:Color = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } + public var thresholdSensitivity: Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } + public var smoothing: Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } + public var colorToReplace = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } public init() { - super.init(fragmentShader:ChromaKeyBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ChromaKeyBlendFragmentShader, numberOfInputs: 2) - ({thresholdSensitivity = 0.4})() - ({smoothing = 0.1})() - ({colorToReplace = Color.green})() + ({ thresholdSensitivity = 0.4 })() + ({ smoothing = 0.1 })() + ({ colorToReplace = Color.green })() } } diff --git a/framework/Source/Operations/ChromaKeying.swift b/framework/Source/Operations/ChromaKeying.swift index 519f29e4..66d5f31e 100644 --- a/framework/Source/Operations/ChromaKeying.swift +++ b/framework/Source/Operations/ChromaKeying.swift @@ -1,13 +1,13 @@ public class ChromaKeying: BasicOperation { - public var thresholdSensitivity:Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } - public var smoothing:Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } - public var colorToReplace:Color = Color.green { didSet { uniformSettings["colorToReplace"] = 
colorToReplace } } + public var thresholdSensitivity: Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } + public var smoothing: Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } + public var colorToReplace = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } public init() { - super.init(fragmentShader:ChromaKeyFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ChromaKeyFragmentShader, numberOfInputs: 1) - ({thresholdSensitivity = 0.4})() - ({smoothing = 0.1})() - ({colorToReplace = Color.green})() + ({ thresholdSensitivity = 0.4 })() + ({ smoothing = 0.1 })() + ({ colorToReplace = Color.green })() } } diff --git a/framework/Source/Operations/CircleGenerator.swift b/framework/Source/Operations/CircleGenerator.swift index 84a5f593..9cbb138b 100644 --- a/framework/Source/Operations/CircleGenerator.swift +++ b/framework/Source/Operations/CircleGenerator.swift @@ -13,15 +13,15 @@ #endif public class CircleGenerator: ImageGenerator { - let circleShader:ShaderProgram + let circleShader: ShaderProgram - public override init(size:Size) { - circleShader = crashOnShaderCompileFailure("CircleGenerator"){try sharedImageProcessingContext.programForVertexShader(CircleVertexShader, fragmentShader:CircleFragmentShader)} + public override init(size: Size) { + circleShader = crashOnShaderCompileFailure("CircleGenerator") { try sharedImageProcessingContext.programForVertexShader(CircleVertexShader, fragmentShader: CircleFragmentShader) } circleShader.colorUniformsUseFourComponents = true - super.init(size:size) + super.init(size: size) } - public func renderCircleOfRadius(_ radius:Float, center:Position, circleColor:Color = Color.white, backgroundColor:Color = Color.black) { + public func renderCircleOfRadius(_ radius: Float, center: Position, circleColor: Color = Color.white, backgroundColor: Color = Color.black) { let scaledRadius = radius * 2.0 
imageFramebuffer.activateFramebufferForRendering() var uniformSettings = ShaderUniformSettings() @@ -35,7 +35,7 @@ public class CircleGenerator: ImageGenerator { let scaledYRadius = scaledRadius / imageFramebuffer.aspectRatioForRotation(.noRotation) uniformSettings["center"] = Position(convertedCenterX, convertedCenterY) - let circleVertices:[GLfloat] = [GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY + scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY + scaledYRadius)] + let circleVertices: [GLfloat] = [GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY + scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY + scaledYRadius)] clearFramebufferWithColor(backgroundColor) circleShader.use() diff --git a/framework/Source/Operations/ClosingFilter.swift b/framework/Source/Operations/ClosingFilter.swift index 966b7157..70d6fa87 100644 --- a/framework/Source/Operations/ClosingFilter.swift +++ b/framework/Source/Operations/ClosingFilter.swift @@ -1,5 +1,5 @@ public class ClosingFilter: OperationGroup { - public var radius:UInt { + public var radius: UInt { didSet { erosion.radius = radius dilation.radius = radius @@ -12,8 +12,8 @@ public class ClosingFilter: OperationGroup { radius = 1 super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.dilation --> self.erosion --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorBlend.swift b/framework/Source/Operations/ColorBlend.swift index 4a81b3f3..5bf347a1 100644 --- 
a/framework/Source/Operations/ColorBlend.swift +++ b/framework/Source/Operations/ColorBlend.swift @@ -1,5 +1,5 @@ public class ColorBlend: BasicOperation { public init() { - super.init(fragmentShader:ColorBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ColorBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorBurnBlend.swift b/framework/Source/Operations/ColorBurnBlend.swift index ea81cd8e..a27b7619 100644 --- a/framework/Source/Operations/ColorBurnBlend.swift +++ b/framework/Source/Operations/ColorBurnBlend.swift @@ -1,5 +1,5 @@ public class ColorBurnBlend: BasicOperation { public init() { - super.init(fragmentShader:ColorBurnBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ColorBurnBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorDodgeBlend.swift b/framework/Source/Operations/ColorDodgeBlend.swift index 79fe7bfd..900d6411 100644 --- a/framework/Source/Operations/ColorDodgeBlend.swift +++ b/framework/Source/Operations/ColorDodgeBlend.swift @@ -1,5 +1,5 @@ public class ColorDodgeBlend: BasicOperation { public init() { - super.init(fragmentShader:ColorDodgeBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ColorDodgeBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorInversion.swift b/framework/Source/Operations/ColorInversion.swift index 7f0ad408..8fcffe65 100644 --- a/framework/Source/Operations/ColorInversion.swift +++ b/framework/Source/Operations/ColorInversion.swift @@ -1,5 +1,5 @@ public class ColorInversion: BasicOperation { public init() { - super.init(fragmentShader:ColorInvertFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ColorInvertFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorLocalBinaryPattern.swift 
b/framework/Source/Operations/ColorLocalBinaryPattern.swift index 20cbe9e1..d74bfb26 100644 --- a/framework/Source/Operations/ColorLocalBinaryPattern.swift +++ b/framework/Source/Operations/ColorLocalBinaryPattern.swift @@ -14,6 +14,6 @@ public class ColorLocalBinaryPattern: TextureSamplingOperation { public init() { - super.init(fragmentShader:ColorLocalBinaryPatternFragmentShader) + super.init(fragmentShader: ColorLocalBinaryPatternFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ColorMatrixFilter.swift b/framework/Source/Operations/ColorMatrixFilter.swift index a8a344ff..a35075f5 100644 --- a/framework/Source/Operations/ColorMatrixFilter.swift +++ b/framework/Source/Operations/ColorMatrixFilter.swift @@ -1,12 +1,11 @@ public class ColorMatrixFilter: BasicOperation { - public var intensity:Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } - public var colorMatrix:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["colorMatrix"] = colorMatrix } } + public var intensity: Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } + public var colorMatrix = Matrix4x4.identity { didSet { uniformSettings["colorMatrix"] = colorMatrix } } public init() { + super.init(fragmentShader: ColorMatrixFragmentShader, numberOfInputs: 1) - super.init(fragmentShader:ColorMatrixFragmentShader, numberOfInputs:1) - - ({intensity = 1.0})() - ({colorMatrix = Matrix4x4.identity})() + ({ intensity = 1.0 })() + ({ colorMatrix = Matrix4x4.identity })() } } diff --git a/framework/Source/Operations/ColourFASTFeatureDetection.swift b/framework/Source/Operations/ColourFASTFeatureDetection.swift index 8582d0de..1e0887cb 100644 --- a/framework/Source/Operations/ColourFASTFeatureDetection.swift +++ b/framework/Source/Operations/ColourFASTFeatureDetection.swift @@ -8,17 +8,17 @@ // TODO: Have the blur radius and texel spacing be tied together into a general sampling distance scale factor public class 
ColourFASTFeatureDetection: OperationGroup { - public var blurRadiusInPixels:Float = 2.0 { didSet { boxBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var blurRadiusInPixels: Float = 2.0 { didSet { boxBlur.blurRadiusInPixels = blurRadiusInPixels } } let boxBlur = BoxBlur() - let colourFASTFeatureDescriptors = TextureSamplingOperation(vertexShader:ColourFASTDecriptorVertexShader, fragmentShader:ColourFASTDecriptorFragmentShader, numberOfInputs:2) + let colourFASTFeatureDescriptors = TextureSamplingOperation(vertexShader: ColourFASTDecriptorVertexShader, fragmentShader: ColourFASTDecriptorFragmentShader, numberOfInputs: 2) public override init() { super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.colourFASTFeatureDescriptors input --> self.boxBlur --> self.colourFASTFeatureDescriptors --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ContrastAdjustment.swift b/framework/Source/Operations/ContrastAdjustment.swift index 3b3ba561..0319eb14 100644 --- a/framework/Source/Operations/ContrastAdjustment.swift +++ b/framework/Source/Operations/ContrastAdjustment.swift @@ -1,9 +1,9 @@ public class ContrastAdjustment: BasicOperation { - public var contrast:Float = 1.0 { didSet { uniformSettings["contrast"] = contrast } } + public var contrast: Float = 1.0 { didSet { uniformSettings["contrast"] = contrast } } public init() { - super.init(fragmentShader:ContrastFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ContrastFragmentShader, numberOfInputs: 1) - ({contrast = 1.0})() + ({ contrast = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Convolution3x3.swift b/framework/Source/Operations/Convolution3x3.swift index 2ea7a6bc..2eaa9664 100644 --- a/framework/Source/Operations/Convolution3x3.swift +++ b/framework/Source/Operations/Convolution3x3.swift @@ -1,9 +1,9 @@ public class Convolution3x3: TextureSamplingOperation { - 
public var convolutionKernel:Matrix3x3 = Matrix3x3.centerOnly { didSet { uniformSettings["convolutionMatrix"] = convolutionKernel } } + public var convolutionKernel = Matrix3x3.centerOnly { didSet { uniformSettings["convolutionMatrix"] = convolutionKernel } } public init() { - super.init(fragmentShader:Convolution3x3FragmentShader) + super.init(fragmentShader: Convolution3x3FragmentShader) - ({convolutionKernel = Matrix3x3.centerOnly})() + ({ convolutionKernel = Matrix3x3.centerOnly })() } } diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index 7118af33..a069f823 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -6,43 +6,43 @@ open class Crop: BasicOperation { public var locationOfCropInPixels: Position? public init() { - super.init(fragmentShader:PassthroughFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1) } override open func renderFrame() { - let inputFramebuffer:Framebuffer = inputFramebuffers[0]! + let inputFramebuffer: Framebuffer = inputFramebuffers[0]! 
let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) - let finalCropSize:GLSize - let normalizedOffsetFromOrigin:Position + let finalCropSize: GLSize + let normalizedOffsetFromOrigin: Position if let cropSize = cropSizeInPixels, let locationOfCrop = locationOfCropInPixels { let glCropSize = GLSize(cropSize) - finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + finalCropSize = GLSize(width: min(inputSize.width, glCropSize.width), height: min(inputSize.height, glCropSize.height)) normalizedOffsetFromOrigin = Position(locationOfCrop.x / Float(inputSize.width), locationOfCrop.y / Float(inputSize.height)) } else if let cropSize = cropSizeInPixels { let glCropSize = GLSize(cropSize) - finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) + finalCropSize = GLSize(width: min(inputSize.width, glCropSize.width), height: min(inputSize.height, glCropSize.height)) normalizedOffsetFromOrigin = Position(Float(inputSize.width / 2 - finalCropSize.width / 2) / Float(inputSize.width), Float(inputSize.height / 2 - finalCropSize.height / 2) / Float(inputSize.height)) } else { finalCropSize = inputSize - normalizedOffsetFromOrigin = Position.zero + normalizedOffsetFromOrigin = Position.zero } - let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) + let normalizedCropSize = Size(width: Float(finalCropSize.width) / Float(inputSize.width), height: Float(finalCropSize.height) / Float(inputSize.height)) - let bufferSize:GLSize - if abs(abs(Double(inputSize.width)/Double(inputSize.height)) - abs(Double(finalCropSize.width)/Double(finalCropSize.height))) < 0.01 { + let bufferSize: GLSize + if abs(abs(Double(inputSize.width) / Double(inputSize.height)) - abs(Double(finalCropSize.width) / Double(finalCropSize.height))) < 0.01 { bufferSize = inputSize } 
else { bufferSize = finalCropSize } - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:bufferSize, stencil:false) + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: bufferSize, stencil: false) - let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) + let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin: normalizedOffsetFromOrigin, cropSize: normalizedCropSize), texture: inputFramebuffer.texture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties]) releaseIncomingFramebuffers() } } diff --git a/framework/Source/Operations/CrosshairGenerator.swift b/framework/Source/Operations/CrosshairGenerator.swift index 0960a3ae..6c499591 100644 --- a/framework/Source/Operations/CrosshairGenerator.swift +++ b/framework/Source/Operations/CrosshairGenerator.swift @@ -13,23 +13,21 @@ #endif public class CrosshairGenerator: ImageGenerator { - - public var crosshairWidth:Float = 5.0 { didSet { uniformSettings["crosshairWidth"] = crosshairWidth } } - public var crosshairColor:Color = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } } + public var crosshairWidth: Float = 5.0 { didSet { 
uniformSettings["crosshairWidth"] = crosshairWidth } } + public var crosshairColor = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } } - let crosshairShader:ShaderProgram + let crosshairShader: ShaderProgram var uniformSettings = ShaderUniformSettings() - public override init(size:Size) { - crosshairShader = crashOnShaderCompileFailure("CrosshairGenerator"){try sharedImageProcessingContext.programForVertexShader(CrosshairVertexShader, fragmentShader:CrosshairFragmentShader)} - super.init(size:size) + public override init(size: Size) { + crosshairShader = crashOnShaderCompileFailure("CrosshairGenerator") { try sharedImageProcessingContext.programForVertexShader(CrosshairVertexShader, fragmentShader: CrosshairFragmentShader) } + super.init(size: size) - ({crosshairWidth = 5.0})() - ({crosshairColor = Color.green})() + ({ crosshairWidth = 5.0 })() + ({ crosshairColor = Color.green })() } - - public func renderCrosshairs(_ positions:[Position]) { + public func renderCrosshairs(_ positions: [Position]) { imageFramebuffer.activateFramebufferForRendering() imageFramebuffer.timingStyle = .stillImage #if GL @@ -46,7 +44,7 @@ public class CrosshairGenerator: ImageGenerator { guard let positionAttribute = crosshairShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } - let convertedPositions = positions.flatMap{$0.toGLArray()} + let convertedPositions = positions.flatMap { $0.toGLArray() } glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, convertedPositions) glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(positions.count)) diff --git a/framework/Source/Operations/Crosshatch.swift b/framework/Source/Operations/Crosshatch.swift index a12b1cb9..77bc1167 100644 --- a/framework/Source/Operations/Crosshatch.swift +++ b/framework/Source/Operations/Crosshatch.swift @@ -1,11 +1,11 @@ public class Crosshatch: BasicOperation { - public var crossHatchSpacing:Float = 0.03 
{ didSet { uniformSettings["crossHatchSpacing"] = crossHatchSpacing } } - public var lineWidth:Float = 0.003 { didSet { uniformSettings["lineWidth"] = lineWidth } } + public var crossHatchSpacing: Float = 0.03 { didSet { uniformSettings["crossHatchSpacing"] = crossHatchSpacing } } + public var lineWidth: Float = 0.003 { didSet { uniformSettings["lineWidth"] = lineWidth } } public init() { - super.init(fragmentShader:CrosshatchFragmentShader, numberOfInputs:1) + super.init(fragmentShader: CrosshatchFragmentShader, numberOfInputs: 1) - ({crossHatchSpacing = 0.03})() - ({lineWidth = 0.003})() + ({ crossHatchSpacing = 0.03 })() + ({ lineWidth = 0.003 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DarkenBlend.swift b/framework/Source/Operations/DarkenBlend.swift index c90c7fee..c446433b 100644 --- a/framework/Source/Operations/DarkenBlend.swift +++ b/framework/Source/Operations/DarkenBlend.swift @@ -1,5 +1,5 @@ public class DarkenBlend: BasicOperation { public init() { - super.init(fragmentShader:DarkenBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DarkenBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DifferenceBlend.swift b/framework/Source/Operations/DifferenceBlend.swift index 7e3ba524..58fd559b 100644 --- a/framework/Source/Operations/DifferenceBlend.swift +++ b/framework/Source/Operations/DifferenceBlend.swift @@ -1,5 +1,5 @@ public class DifferenceBlend: BasicOperation { public init() { - super.init(fragmentShader:DifferenceBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DifferenceBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Dilation.swift b/framework/Source/Operations/Dilation.swift index 2e180150..2632518b 100644 --- a/framework/Source/Operations/Dilation.swift +++ b/framework/Source/Operations/Dilation.swift @@ -1,24 +1,24 @@ public class Dilation: 
TwoStageOperation { - public var radius:UInt { + public var radius: UInt { didSet { switch radius { case 0, 1: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Dilation1FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Dilation1FragmentShader) } case 2: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader:Dilation2FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader: Dilation2FragmentShader) } case 3: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader:Dilation3FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader: Dilation3FragmentShader) } case 4: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Dilation4FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Dilation4FragmentShader) } default: - shader = crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Dilation4FragmentShader)} + shader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Dilation4FragmentShader) } } } } public init() { radius = 1 - let initialShader = 
crashOnShaderCompileFailure("Dilation"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Dilation1FragmentShader)} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("Dilation") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Dilation1FragmentShader) } + super.init(shader: initialShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DissolveBlend.swift b/framework/Source/Operations/DissolveBlend.swift index 0b4f5d51..51d788e4 100755 --- a/framework/Source/Operations/DissolveBlend.swift +++ b/framework/Source/Operations/DissolveBlend.swift @@ -1,9 +1,9 @@ public class DissolveBlend: BasicOperation { - public var mix:Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } + public var mix: Float = 0.5 { didSet { uniformSettings["mixturePercent"] = mix } } public init() { - super.init(fragmentShader:DissolveBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DissolveBlendFragmentShader, numberOfInputs: 2) - ({mix = 0.5})() + ({ mix = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/DivideBlend.swift b/framework/Source/Operations/DivideBlend.swift index 599f9526..6df6d537 100644 --- a/framework/Source/Operations/DivideBlend.swift +++ b/framework/Source/Operations/DivideBlend.swift @@ -1,5 +1,5 @@ public class DivideBlend: BasicOperation { public init() { - super.init(fragmentShader:DivideBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: DivideBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/EmbossFilter.swift b/framework/Source/Operations/EmbossFilter.swift index 16e6cbe6..5cc4e81a 100644 --- a/framework/Source/Operations/EmbossFilter.swift +++ b/framework/Source/Operations/EmbossFilter.swift @@ -1,7 +1,7 @@ -public 
class EmbossFilter : Convolution3x3 { - public var intensity:Float = 1.0 { +public class EmbossFilter: Convolution3x3 { + public var intensity: Float = 1.0 { didSet { - self.convolutionKernel = Matrix3x3(rowMajorValues:[ + self.convolutionKernel = Matrix3x3(rowMajorValues: [ intensity * (-2.0), -intensity, 0.0, -intensity, 1.0, intensity, 0.0, intensity, intensity * 2.0]) @@ -11,6 +11,6 @@ public class EmbossFilter : Convolution3x3 { public override init() { super.init() - ({intensity = 1.0})() + ({ intensity = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Erosion.swift b/framework/Source/Operations/Erosion.swift index e5d7ccd6..590f124e 100644 --- a/framework/Source/Operations/Erosion.swift +++ b/framework/Source/Operations/Erosion.swift @@ -1,24 +1,24 @@ public class Erosion: TwoStageOperation { - public var radius:UInt { + public var radius: UInt { didSet { switch radius { case 0, 1: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Erosion1FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Erosion1FragmentShader) } case 2: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader:Erosion2FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation2VertexShader, fragmentShader: Erosion2FragmentShader) } case 3: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader:Erosion3FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation3VertexShader, fragmentShader: 
Erosion3FragmentShader) } case 4: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Erosion4FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Erosion4FragmentShader) } default: - shader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader:Erosion4FragmentShader)} + shader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation4VertexShader, fragmentShader: Erosion4FragmentShader) } } } } public init() { radius = 1 - let initialShader = crashOnShaderCompileFailure("Erosion"){try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader:Erosion1FragmentShader)} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("Erosion") { try sharedImageProcessingContext.programForVertexShader(ErosionDilation1VertexShader, fragmentShader: Erosion1FragmentShader) } + super.init(shader: initialShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ExclusionBlend.swift b/framework/Source/Operations/ExclusionBlend.swift index 52123038..8a0755c3 100644 --- a/framework/Source/Operations/ExclusionBlend.swift +++ b/framework/Source/Operations/ExclusionBlend.swift @@ -1,5 +1,5 @@ public class ExclusionBlend: BasicOperation { public init() { - super.init(fragmentShader:ExclusionBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ExclusionBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ExposureAdjustment.swift b/framework/Source/Operations/ExposureAdjustment.swift index 6f87488a..26824baa 100644 --- 
a/framework/Source/Operations/ExposureAdjustment.swift +++ b/framework/Source/Operations/ExposureAdjustment.swift @@ -1,9 +1,9 @@ public class ExposureAdjustment: BasicOperation { - public var exposure:Float = 0.0 { didSet { uniformSettings["exposure"] = exposure } } + public var exposure: Float = 0.0 { didSet { uniformSettings["exposure"] = exposure } } public init() { - super.init(fragmentShader:ExposureFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ExposureFragmentShader, numberOfInputs: 1) - ({exposure = 0.0})() + ({ exposure = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/FalseColor.swift b/framework/Source/Operations/FalseColor.swift index 36dcc112..f55d23b2 100644 --- a/framework/Source/Operations/FalseColor.swift +++ b/framework/Source/Operations/FalseColor.swift @@ -1,11 +1,11 @@ public class FalseColor: BasicOperation { - public var firstColor:Color = Color(red:0.0, green:0.0, blue:0.5, alpha:1.0) { didSet { uniformSettings["firstColor"] = firstColor } } - public var secondColor:Color = Color.red { didSet { uniformSettings["secondColor"] = secondColor } } + public var firstColor = Color(red: 0.0, green: 0.0, blue: 0.5, alpha: 1.0) { didSet { uniformSettings["firstColor"] = firstColor } } + public var secondColor = Color.red { didSet { uniformSettings["secondColor"] = secondColor } } public init() { - super.init(fragmentShader:FalseColorFragmentShader, numberOfInputs:1) + super.init(fragmentShader: FalseColorFragmentShader, numberOfInputs: 1) - ({firstColor = Color(red:0.0, green:0.0, blue:0.5, alpha:1.0)})() - ({secondColor = Color.red})() + ({ firstColor = Color(red: 0.0, green: 0.0, blue: 0.5, alpha: 1.0) })() + ({ secondColor = Color.red })() } } diff --git a/framework/Source/Operations/GammaAdjustment.swift b/framework/Source/Operations/GammaAdjustment.swift index 6db7bcf3..413419cd 100644 --- a/framework/Source/Operations/GammaAdjustment.swift +++ 
b/framework/Source/Operations/GammaAdjustment.swift @@ -1,9 +1,9 @@ public class GammaAdjustment: BasicOperation { - public var gamma:Float = 1.0 { didSet { uniformSettings["gamma"] = gamma } } + public var gamma: Float = 1.0 { didSet { uniformSettings["gamma"] = gamma } } public init() { - super.init(fragmentShader:GammaFragmentShader, numberOfInputs:1) + super.init(fragmentShader: GammaFragmentShader, numberOfInputs: 1) - ({gamma = 1.0})() + ({ gamma = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/GaussianBlur.swift b/framework/Source/Operations/GaussianBlur.swift index 6d3d70c6..7843f21e 100755 --- a/framework/Source/Operations/GaussianBlur.swift +++ b/framework/Source/Operations/GaussianBlur.swift @@ -6,13 +6,13 @@ let M_PI = 3.14159265359 // TODO: remove this once Foundation pulls this in on L import Foundation public class GaussianBlur: TwoStageOperation { - public var blurRadiusInPixels:Float { + public var blurRadiusInPixels: Float { didSet { - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold))} + self.shader = crashOnShaderCompileFailure("GaussianBlur") { try 
sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader: fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold)) } } } } @@ -21,11 +21,11 @@ public class GaussianBlur: TwoStageOperation { didSet { guard luminanceThreshold != oldValue else { return } uniformSettings["luminanceThreshold"] = luminanceThreshold - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: self.luminanceThreshold))} + self.shader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold), fragmentShader: fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: self.luminanceThreshold)) } } } } @@ -33,10 +33,10 @@ public class GaussianBlur: TwoStageOperation { public init(blurRadiusInPixels: Float = 2.0, luminanceThreshold: Float? 
= nil) { self.blurRadiusInPixels = blurRadiusInPixels self.luminanceThreshold = luminanceThreshold - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:false) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: false) let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - let initialShader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: luminanceThreshold), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma), luminanceThreshold: luminanceThreshold))} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: luminanceThreshold), fragmentShader: fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma), luminanceThreshold: luminanceThreshold)) } + super.init(shader: initialShader, numberOfInputs: 1) self.downsamplingFactor = downsamplingFactor if let luminanceThreshold = luminanceThreshold { self.uniformSettings["luminanceThreshold"] = luminanceThreshold @@ -48,22 +48,21 @@ public class GaussianBlur: TwoStageOperation { // MARK: - // MARK: Blur sizing calculations -func sigmaAndDownsamplingForBlurRadius(_ radius:Float, limit:Float, override:Bool = false) -> (sigma:Float, downsamplingFactor:Float?) { +func sigmaAndDownsamplingForBlurRadius(_ radius: Float, limit: Float, override: Bool = false) -> (sigma: Float, downsamplingFactor: Float?) 
{ // For now, only do integral sigmas let startingRadius = Float(round(Double(radius))) - guard ((startingRadius > limit) && (!override)) else { return (sigma:startingRadius, downsamplingFactor:nil) } + guard (startingRadius > limit) && (!override) else { return (sigma:startingRadius, downsamplingFactor:nil) } return (sigma:limit, downsamplingFactor:startingRadius / limit) } - // inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent -func pixelRadiusForBlurSigma(_ sigma:Double) -> UInt { +func pixelRadiusForBlurSigma(_ sigma: Double) -> UInt { // 7.0 is the limit for blur size for hardcoded varying offsets let minimumWeightToFindEdgeOfSamplingArea = 1.0 / 256.0 - var calculatedSampleRadius:UInt = 0 - if (sigma >= 1.0) { // Avoid a divide-by-zero error here + var calculatedSampleRadius: UInt = 0 + if sigma >= 1.0 { // Avoid a divide-by-zero error here // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel calculatedSampleRadius = UInt(floor(sqrt(-2.0 * pow(sigma, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * .pi * pow(sigma, 2.0))) ))) calculatedSampleRadius += calculatedSampleRadius % 2 // There's nothing to gain from handling odd radius sizes, due to the optimizations I use @@ -75,32 +74,32 @@ func pixelRadiusForBlurSigma(_ sigma:Double) -> UInt { // MARK: - // MARK: Standard Gaussian blur shaders -func standardGaussianWeightsForRadius(_ blurRadius:UInt, sigma:Double) -> [Double] { +func standardGaussianWeightsForRadius(_ blurRadius: UInt, sigma: Double) -> [Double] { var gaussianWeights = [Double]() var sumOfWeights = 0.0 for gaussianWeightIndex in 0...blurRadius { let weight = (1.0 / sqrt(2.0 * .pi * pow(sigma, 2.0))) * exp(-pow(Double(gaussianWeightIndex), 2.0) / (2.0 * pow(sigma, 2.0))) gaussianWeights.append(weight) - if (gaussianWeightIndex == 0) { + if gaussianWeightIndex == 0 { sumOfWeights 
+= weight } else { sumOfWeights += (weight * 2.0) } } - return gaussianWeights.map{$0 / sumOfWeights} + return gaussianWeights.map { $0 / sumOfWeights } } -func vertexShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return OneInputVertexShader } +func vertexShaderForStandardGaussianBlurOfRadius(_ radius: UInt, sigma: Double) -> String { + guard radius > 0 else { return OneInputVertexShader } let numberOfBlurCoordinates = radius * 2 + 1 var shaderString = "varying vec2 textureCoordinate;\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 blurCoordinates[\(numberOfBlurCoordinates)];\n \n void main()\n {\n gl_Position = position;\n \n vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" for currentBlurCoordinateIndex in 0.. 0) { + } else if offsetFromCenter > 0 { shaderString += "blurCoordinates[\(currentBlurCoordinateIndex)] = inputTextureCoordinate.xy + singleStepOffset * \(Float(offsetFromCenter));\n" } else { shaderString += "blurCoordinates[\(currentBlurCoordinateIndex)] = inputTextureCoordinate.xy;\n" @@ -111,10 +110,10 @@ func vertexShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> return shaderString } -func fragmentShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForStandardGaussianBlurOfRadius(_ radius: UInt, sigma: Double) -> String { + guard radius > 0 else { return PassthroughFragmentShader } - let gaussianWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) + let gaussianWeights = standardGaussianWeightsForRadius(radius, sigma: sigma) let numberOfBlurCoordinates = radius * 2 + 1 #if GLES @@ -125,7 +124,7 @@ func fragmentShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) for currentBlurCoordinateIndex in 0.. 
[Double] { - let standardWeights = standardGaussianWeightsForRadius(blurRadius, sigma:sigma) +func optimizedGaussianOffsetsForRadius(_ blurRadius: UInt, sigma: Double) -> [Double] { + let standardWeights = standardGaussianWeightsForRadius(blurRadius, sigma: sigma) let numberOfOptimizedOffsets = min(blurRadius / 2 + (blurRadius % 2), 7) var optimizedOffsets = [Double]() @@ -154,10 +153,10 @@ func optimizedGaussianOffsetsForRadius(_ blurRadius:UInt, sigma:Double) -> [Doub return optimizedOffsets } -func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, luminanceThreshold: Float? = nil) -> String { - guard (radius > 0) else { return OneInputVertexShader } +func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius: UInt, sigma: Double, luminanceThreshold: Float? = nil) -> String { + guard radius > 0 else { return OneInputVertexShader } - let optimizedOffsets = optimizedGaussianOffsetsForRadius(radius, sigma:sigma) + let optimizedOffsets = optimizedGaussianOffsetsForRadius(radius, sigma: sigma) let numberOfOptimizedOffsets = optimizedOffsets.count // Header @@ -180,10 +179,10 @@ func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, l return shaderString } -func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, luminanceThreshold: Float? = nil) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius: UInt, sigma: Double, luminanceThreshold: Float? 
= nil) -> String { + guard radius > 0 else { return PassthroughFragmentShader } - let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) + let standardWeights = standardGaussianWeightsForRadius(radius, sigma: sigma) let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) @@ -211,7 +210,7 @@ func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, } // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader - if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) { + if trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets { #if GLES shaderString += "highp vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" #else @@ -220,8 +219,8 @@ func fragmentShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double, } for currentOverlowTextureRead in numberOfOptimizedOffsets.. ())? + public var blurRadiusInPixels: Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var sensitivity: Float = 5.0 { didSet { harrisCornerDetector.uniformSettings["sensitivity"] = sensitivity } } + public var threshold: Float = 0.2 { didSet { nonMaximumSuppression.uniformSettings["threshold"] = threshold } } + public var cornersDetectedCallback: (([Position]) -> Void)? 
- let xyDerivative = TextureSamplingOperation(fragmentShader:XYDerivativeFragmentShader) + let xyDerivative = TextureSamplingOperation(fragmentShader: XYDerivativeFragmentShader) let gaussianBlur = GaussianBlur() - let harrisCornerDetector:BasicOperation - let nonMaximumSuppression = TextureSamplingOperation(fragmentShader:ThresholdedNonMaximumSuppressionFragmentShader) + let harrisCornerDetector: BasicOperation + let nonMaximumSuppression = TextureSamplingOperation(fragmentShader: ThresholdedNonMaximumSuppressionFragmentShader) - public init(fragmentShader:String = HarrisCornerDetectorFragmentShader) { - harrisCornerDetector = BasicOperation(fragmentShader:fragmentShader) + public init(fragmentShader: String = HarrisCornerDetectorFragmentShader) { + harrisCornerDetector = BasicOperation(fragmentShader: fragmentShader) super.init() - ({blurRadiusInPixels = 2.0})() - ({sensitivity = 5.0})() - ({threshold = 0.2})() + ({ blurRadiusInPixels = 2.0 })() + ({ sensitivity = 5.0 })() + ({ threshold = 0.2 })() outputImageRelay.newImageCallback = {[weak self] framebuffer in if let cornersDetectedCallback = self?.cornersDetectedCallback { @@ -50,19 +50,19 @@ public class HarrisCornerDetector: OperationGroup { } } - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.xyDerivative --> self.gaussianBlur --> self.harrisCornerDetector --> self.nonMaximumSuppression --> output } } } -func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { +func extractCornersFromImage(_ framebuffer: Framebuffer) -> [Position] { let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) // var rawImagePixels = [UInt8](count:imageByteSize, repeatedValue:0) // let startTime = CACurrentMediaTime() - let rawImagePixels = UnsafeMutablePointer.allocate(capacity:imageByteSize) + let rawImagePixels = UnsafeMutablePointer.allocate(capacity: imageByteSize) // -Onone, [UInt8] array: 30 ms for 720p frame on Retina iMac // -O, [UInt8] 
array: 4 ms for 720p frame on Retina iMac // -Onone, UnsafeMutablePointer: 7 ms for 720p frame on Retina iMac @@ -76,10 +76,10 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { var corners = [Position]() var currentByte = 0 - while (currentByte < imageByteSize) { + while currentByte < imageByteSize { let colorByte = rawImagePixels[currentByte] - if (colorByte > 0) { + if colorByte > 0 { let xCoordinate = currentByte % imageWidth let yCoordinate = currentByte / imageWidth diff --git a/framework/Source/Operations/Haze.swift b/framework/Source/Operations/Haze.swift index 4c394281..f64cc3ac 100644 --- a/framework/Source/Operations/Haze.swift +++ b/framework/Source/Operations/Haze.swift @@ -1,11 +1,11 @@ public class Haze: BasicOperation { - public var distance:Float = 0.2 { didSet { uniformSettings["hazeDistance"] = distance } } - public var slope:Float = 0.0 { didSet { uniformSettings["slope"] = slope } } + public var distance: Float = 0.2 { didSet { uniformSettings["hazeDistance"] = distance } } + public var slope: Float = 0.0 { didSet { uniformSettings["slope"] = slope } } public init() { - super.init(fragmentShader:HazeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HazeFragmentShader, numberOfInputs: 1) - ({distance = 0.2})() - ({slope = 0.0})() + ({ distance = 0.2 })() + ({ slope = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HighPassFilter.swift b/framework/Source/Operations/HighPassFilter.swift index bad450dc..3d36fd46 100644 --- a/framework/Source/Operations/HighPassFilter.swift +++ b/framework/Source/Operations/HighPassFilter.swift @@ -7,11 +7,11 @@ public class HighPassFilter: OperationGroup { public override init() { super.init() - ({strength = 0.5})() + ({ strength = 0.5 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.differenceBlend input --> self.lowPass --> self.differenceBlend --> output } } -} \ No newline at end of file 
+} diff --git a/framework/Source/Operations/HighlightAndShadowTint.swift b/framework/Source/Operations/HighlightAndShadowTint.swift index 54b25d16..6d79cefd 100644 --- a/framework/Source/Operations/HighlightAndShadowTint.swift +++ b/framework/Source/Operations/HighlightAndShadowTint.swift @@ -1,15 +1,15 @@ public class HighlightAndShadowTint: BasicOperation { - public var shadowTintIntensity:Float = 0.0 { didSet { uniformSettings["shadowTintIntensity"] = shadowTintIntensity } } - public var highlightTintIntensity:Float = 0.0 { didSet { uniformSettings["highlightTintIntensity"] = highlightTintIntensity } } - public var shadowTintColor:Color = Color.red { didSet { uniformSettings["shadowTintColor"] = shadowTintColor } } - public var highlightTintColor:Color = Color.blue { didSet { uniformSettings["highlightTintColor"] = highlightTintColor } } + public var shadowTintIntensity: Float = 0.0 { didSet { uniformSettings["shadowTintIntensity"] = shadowTintIntensity } } + public var highlightTintIntensity: Float = 0.0 { didSet { uniformSettings["highlightTintIntensity"] = highlightTintIntensity } } + public var shadowTintColor = Color.red { didSet { uniformSettings["shadowTintColor"] = shadowTintColor } } + public var highlightTintColor = Color.blue { didSet { uniformSettings["highlightTintColor"] = highlightTintColor } } public init() { - super.init(fragmentShader:HighlightShadowTintFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HighlightShadowTintFragmentShader, numberOfInputs: 1) - ({shadowTintIntensity = 0.0})() - ({highlightTintIntensity = 0.0})() - ({shadowTintColor = Color.red})() - ({highlightTintColor = Color.blue})() + ({ shadowTintIntensity = 0.0 })() + ({ highlightTintIntensity = 0.0 })() + ({ shadowTintColor = Color.red })() + ({ highlightTintColor = Color.blue })() } } diff --git a/framework/Source/Operations/HighlightsAndShadows.swift b/framework/Source/Operations/HighlightsAndShadows.swift index 95f77f72..3a414f8d 100644 --- 
a/framework/Source/Operations/HighlightsAndShadows.swift +++ b/framework/Source/Operations/HighlightsAndShadows.swift @@ -1,11 +1,11 @@ public class HighlightsAndShadows: BasicOperation { - public var shadows:Float = 0.0 { didSet { uniformSettings["shadows"] = shadows } } - public var highlights:Float = 1.0 { didSet { uniformSettings["highlights"] = highlights } } + public var shadows: Float = 0.0 { didSet { uniformSettings["shadows"] = shadows } } + public var highlights: Float = 1.0 { didSet { uniformSettings["highlights"] = highlights } } public init() { - super.init(fragmentShader:HighlightShadowFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HighlightShadowFragmentShader, numberOfInputs: 1) - ({shadows = 0.0})() - ({highlights = 1.0})() + ({ shadows = 0.0 })() + ({ highlights = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index 5e2c7e98..5a4c6f5c 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -32,31 +32,31 @@ public enum HistogramType { } public class Histogram: BasicOperation { - public var downsamplingFactor:UInt = 16 + public var downsamplingFactor: UInt = 16 - var shader2:ShaderProgram? = nil - var shader3:ShaderProgram? = nil + var shader2: ShaderProgram? + var shader3: ShaderProgram? 
- public init(type:HistogramType) { + public init(type: HistogramType) { switch type { - case .red: super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .blue: super.init(vertexShader:HistogramBlueSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .green: super.init(vertexShader:HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .luminance: super.init(vertexShader:HistogramLuminanceSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) + case .red: super.init(vertexShader: HistogramRedSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + case .blue: super.init(vertexShader: HistogramBlueSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + case .green: super.init(vertexShader: HistogramGreenSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + case .luminance: super.init(vertexShader: HistogramLuminanceSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) case .rgb: - super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - shader2 = crashOnShaderCompileFailure("Histogram"){try sharedImageProcessingContext.programForVertexShader(HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader)} - shader3 = crashOnShaderCompileFailure("Histogram"){try sharedImageProcessingContext.programForVertexShader(HistogramBlueSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader)} + super.init(vertexShader: HistogramRedSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader, numberOfInputs: 1) + shader2 = crashOnShaderCompileFailure("Histogram") { try 
sharedImageProcessingContext.programForVertexShader(HistogramGreenSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader) } + shader3 = crashOnShaderCompileFailure("Histogram") { try sharedImageProcessingContext.programForVertexShader(HistogramBlueSamplingVertexShader, fragmentShader: HistogramAccumulationFragmentShader) } } } override open func renderFrame() { let inputSize = sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!) let inputByteSize = Int(inputSize.width * inputSize.height * 4) - let data = UnsafeMutablePointer.allocate(capacity:inputByteSize) + let data = UnsafeMutablePointer.allocate(capacity: inputByteSize) glReadPixels(0, 0, inputSize.width, inputSize.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:256, height:3), stencil:mask != nil) + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: 256, height: 3), stencil: mask != nil) releaseIncomingFramebuffers() renderFramebuffer.activateFramebufferForRendering() diff --git a/framework/Source/Operations/HistogramDisplay.swift b/framework/Source/Operations/HistogramDisplay.swift index c12b77e3..3f18e065 100644 --- a/framework/Source/Operations/HistogramDisplay.swift +++ b/framework/Source/Operations/HistogramDisplay.swift @@ -1,5 +1,5 @@ public class HistogramDisplay: BasicOperation { public init() { - super.init(vertexShader:HistogramDisplayVertexShader, fragmentShader:HistogramDisplayFragmentShader, numberOfInputs:1) + super.init(vertexShader: HistogramDisplayVertexShader, fragmentShader: HistogramDisplayFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HistogramEqualization.swift b/framework/Source/Operations/HistogramEqualization.swift index 49dc45a3..de58822b 100644 --- 
a/framework/Source/Operations/HistogramEqualization.swift +++ b/framework/Source/Operations/HistogramEqualization.swift @@ -1,31 +1,30 @@ public class HistogramEqualization: OperationGroup { public var downsamplingFactor: UInt = 16 { didSet { histogram.downsamplingFactor = downsamplingFactor } } - let histogram:Histogram + let histogram: Histogram let rawDataInput = RawDataInput() let rawDataOutput = RawDataOutput() - let equalizationFilter:BasicOperation + let equalizationFilter: BasicOperation - public init(type:HistogramType) { - - self.histogram = Histogram(type:type) + public init(type: HistogramType) { + self.histogram = Histogram(type: type) switch type { - case .red: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRedFragmentShader, numberOfInputs:2) - case .blue: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationBlueFragmentShader, numberOfInputs:2) - case .green: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationGreenFragmentShader, numberOfInputs:2) - case .luminance: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationLuminanceFragmentShader, numberOfInputs:2) - case .rgb: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRGBFragmentShader, numberOfInputs:2) + case .red: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationRedFragmentShader, numberOfInputs: 2) + case .blue: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationBlueFragmentShader, numberOfInputs: 2) + case .green: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationGreenFragmentShader, numberOfInputs: 2) + case .luminance: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationLuminanceFragmentShader, numberOfInputs: 2) + case .rgb: self.equalizationFilter = BasicOperation(fragmentShader: HistogramEqualizationRGBFragmentShader, numberOfInputs: 2) } 
super.init() - ({downsamplingFactor = 16})() + ({ downsamplingFactor = 16 })() - self.configureGroup{input, output in + self.configureGroup {input, output in self.rawDataOutput.dataAvailableCallback = {data in - var redHistogramBin = [Int](repeating:0, count:256) - var greenHistogramBin = [Int](repeating:0, count:256) - var blueHistogramBin = [Int](repeating:0, count:256) + var redHistogramBin = [Int](repeating: 0, count: 256) + var greenHistogramBin = [Int](repeating: 0, count: 256) + var blueHistogramBin = [Int](repeating: 0, count: 256) let rowWidth = 256 * 4 redHistogramBin[0] = Int(data[rowWidth]) @@ -38,7 +37,7 @@ public class HistogramEqualization: OperationGroup { blueHistogramBin[dataIndex] = blueHistogramBin[dataIndex - 1] + Int(data[rowWidth + (dataIndex * 4) + 2]) } - var equalizationLookupTable = [UInt8](repeating:0, count:256 * 4) + var equalizationLookupTable = [UInt8](repeating: 0, count: 256 * 4) for binIndex in 0..<256 { equalizationLookupTable[binIndex * 4] = UInt8((((redHistogramBin[binIndex] - redHistogramBin[0]) * 255) / redHistogramBin[255])) equalizationLookupTable[(binIndex * 4) + 1] = UInt8((((greenHistogramBin[binIndex] - greenHistogramBin[0]) * 255) / greenHistogramBin[255])) @@ -46,7 +45,7 @@ public class HistogramEqualization: OperationGroup { equalizationLookupTable[(binIndex * 4) + 3] = 255 } - self.rawDataInput.uploadBytes(equalizationLookupTable, size:Size(width:256, height:1), pixelFormat:.rgba) + self.rawDataInput.uploadBytes(equalizationLookupTable, size: Size(width: 256, height: 1), pixelFormat: .rgba) } input --> self.histogram --> self.rawDataOutput diff --git a/framework/Source/Operations/HueAdjustment.swift b/framework/Source/Operations/HueAdjustment.swift index 14891889..81ad46f4 100644 --- a/framework/Source/Operations/HueAdjustment.swift +++ b/framework/Source/Operations/HueAdjustment.swift @@ -1,9 +1,9 @@ public class HueAdjustment: BasicOperation { - public var hue:Float = 90.0 { didSet { uniformSettings["hueAdjust"] = 
hue } } + public var hue: Float = 90.0 { didSet { uniformSettings["hueAdjust"] = hue } } public init() { - super.init(fragmentShader:HueFragmentShader, numberOfInputs:1) + super.init(fragmentShader: HueFragmentShader, numberOfInputs: 1) - ({hue = 90.0})() + ({ hue = 90.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HueBlend.swift b/framework/Source/Operations/HueBlend.swift index 33956fe6..482bac7b 100644 --- a/framework/Source/Operations/HueBlend.swift +++ b/framework/Source/Operations/HueBlend.swift @@ -1,5 +1,5 @@ public class HueBlend: BasicOperation { public init() { - super.init(fragmentShader:HueBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: HueBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index 6b5b5675..fa09905e 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -1,16 +1,16 @@ public class ImageBuffer: ImageProcessingOperation { // TODO: Dynamically release framebuffers on buffer resize - public var bufferSize:UInt = 1 + public var bufferSize: UInt = 1 public var activatePassthroughOnNextFrame = true - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 public let targets = TargetContainer() public let sources = SourceContainer() var bufferedFramebuffers = [Framebuffer]() - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { bufferedFramebuffers.append(framebuffer) - if (bufferedFramebuffers.count > Int(bufferSize)) { + if bufferedFramebuffers.count > Int(bufferSize) { let releasedFramebuffer = bufferedFramebuffers.removeFirst() updateTargetsWithFramebuffer(releasedFramebuffer) releasedFramebuffer.unlock() @@ -23,7 +23,7 @@ public class ImageBuffer: 
ImageProcessingOperation { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Buffers most likely won't need this } } diff --git a/framework/Source/Operations/KuwaharaFilter.swift b/framework/Source/Operations/KuwaharaFilter.swift index 7063961e..83725ab2 100644 --- a/framework/Source/Operations/KuwaharaFilter.swift +++ b/framework/Source/Operations/KuwaharaFilter.swift @@ -1,9 +1,9 @@ public class KuwaharaFilter: BasicOperation { - public var radius:Int = 3 { didSet { uniformSettings["radius"] = radius } } + public var radius: Int = 3 { didSet { uniformSettings["radius"] = radius } } public init() { - super.init(fragmentShader:KuwaharaFragmentShader, numberOfInputs:1) + super.init(fragmentShader: KuwaharaFragmentShader, numberOfInputs: 1) - ({radius = 3})() + ({ radius = 3 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/KuwaharaRadius3Filter.swift b/framework/Source/Operations/KuwaharaRadius3Filter.swift index 6380d541..60f3030f 100644 --- a/framework/Source/Operations/KuwaharaRadius3Filter.swift +++ b/framework/Source/Operations/KuwaharaRadius3Filter.swift @@ -1,5 +1,5 @@ public class KuwaharaRadius3Filter: BasicOperation { public init() { - super.init(fragmentShader:KuwaharaRadius3FragmentShader, numberOfInputs:1) + super.init(fragmentShader: KuwaharaRadius3FragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LanczosResampling.swift b/framework/Source/Operations/LanczosResampling.swift index fb6c3af2..609564f5 100644 --- a/framework/Source/Operations/LanczosResampling.swift +++ b/framework/Source/Operations/LanczosResampling.swift @@ -1,32 +1,32 @@ public class LanczosResampling: BasicOperation { public init() { - super.init(vertexShader:LanczosResamplingVertexShader, fragmentShader:LanczosResamplingFragmentShader) + super.init(vertexShader: 
LanczosResamplingVertexShader, fragmentShader: LanczosResamplingFragmentShader) } - override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + override func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) // Shrink the vertical component of the first stage let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) - let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:inputSize.width, height:renderFramebuffer.size.height), stencil:false) + let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: inputSize.width, height: renderFramebuffer.size.height), stencil: false) firstStageFramebuffer.lock() firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - let texelSize = inputFramebuffer.initialStageTexelSize(for:outputRotation) + let texelSize = inputFramebuffer.initialStageTexelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:textureProperties) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) releaseIncomingFramebuffers() // Shrink the width component of the result - let secondStageTexelSize = firstStageFramebuffer.texelSize(for:.noRotation) + let secondStageTexelSize = firstStageFramebuffer.texelSize(for: .noRotation) uniformSettings["texelWidth"] = 
secondStageTexelSize.width uniformSettings["texelHeight"] = 0.0 renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() } } diff --git a/framework/Source/Operations/Laplacian.swift b/framework/Source/Operations/Laplacian.swift index 0ce6ec13..18ac8703 100644 --- a/framework/Source/Operations/Laplacian.swift +++ b/framework/Source/Operations/Laplacian.swift @@ -1,5 +1,5 @@ public class Laplacian: TextureSamplingOperation { public init() { - super.init(fragmentShader:LaplacianFragmentShader) + super.init(fragmentShader: LaplacianFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LevelsAdjustment.swift b/framework/Source/Operations/LevelsAdjustment.swift index e1eaba74..8bcd17c3 100644 --- a/framework/Source/Operations/LevelsAdjustment.swift +++ b/framework/Source/Operations/LevelsAdjustment.swift @@ -1,19 +1,19 @@ public class LevelsAdjustment: BasicOperation { - public var minimum:Color = Color(red:0.0, green:0.0, blue:0.0) { didSet { uniformSettings["levelMinimum"] = minimum } } - public var middle:Color = Color(red:1.0, green:1.0, blue:1.0) { didSet { uniformSettings["levelMiddle"] = middle } } - public var maximum:Color = Color(red:1.0, green:1.0, blue:1.0) { didSet { uniformSettings["levelMaximum"] = maximum } } - public var minOutput:Color = Color(red:0.0, green:0.0, blue:0.0) { didSet { uniformSettings["minOutput"] = minOutput } } - public var maxOutput:Color = Color(red:1.0, green:1.0, blue:1.0) { didSet { uniformSettings["maxOutput"] = maxOutput } } + public var 
minimum = Color(red: 0.0, green: 0.0, blue: 0.0) { didSet { uniformSettings["levelMinimum"] = minimum } } + public var middle = Color(red: 1.0, green: 1.0, blue: 1.0) { didSet { uniformSettings["levelMiddle"] = middle } } + public var maximum = Color(red: 1.0, green: 1.0, blue: 1.0) { didSet { uniformSettings["levelMaximum"] = maximum } } + public var minOutput = Color(red: 0.0, green: 0.0, blue: 0.0) { didSet { uniformSettings["minOutput"] = minOutput } } + public var maxOutput = Color(red: 1.0, green: 1.0, blue: 1.0) { didSet { uniformSettings["maxOutput"] = maxOutput } } // TODO: Is this an acceptable interface, or do I need to bring this closer to the old implementation? public init() { - super.init(fragmentShader:LevelsFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LevelsFragmentShader, numberOfInputs: 1) - ({minimum = Color(red:0.0, green:0.0, blue:0.0)})() - ({middle = Color(red:1.0, green:1.0, blue:1.0)})() - ({maximum = Color(red:1.0, green:1.0, blue:1.0)})() - ({minOutput = Color(red:0.0, green:0.0, blue:0.0)})() - ({maxOutput = Color(red:1.0, green:1.0, blue:1.0)})() + ({ minimum = Color(red: 0.0, green: 0.0, blue: 0.0) })() + ({ middle = Color(red: 1.0, green: 1.0, blue: 1.0) })() + ({ maximum = Color(red: 1.0, green: 1.0, blue: 1.0) })() + ({ minOutput = Color(red: 0.0, green: 0.0, blue: 0.0) })() + ({ maxOutput = Color(red: 1.0, green: 1.0, blue: 1.0) })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LightenBlend.swift b/framework/Source/Operations/LightenBlend.swift index 4f5a261f..7ee018a6 100644 --- a/framework/Source/Operations/LightenBlend.swift +++ b/framework/Source/Operations/LightenBlend.swift @@ -1,5 +1,5 @@ public class LightenBlend: BasicOperation { public init() { - super.init(fragmentShader:LightenBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LightenBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git 
a/framework/Source/Operations/LineGenerator.swift b/framework/Source/Operations/LineGenerator.swift index 0120a184..a9eeb9d8 100644 --- a/framework/Source/Operations/LineGenerator.swift +++ b/framework/Source/Operations/LineGenerator.swift @@ -13,44 +13,44 @@ #endif public enum Line { - case infinite(slope:Float, intercept:Float) - case segment(p1:Position, p2:Position) + case infinite(slope: Float, intercept: Float) + case segment(p1: Position, p2: Position) func toGLEndpoints() -> [GLfloat] { switch self { case .infinite(let slope, let intercept): - if (slope > 9000.0) {// Vertical line + if slope > 9000.0 {// Vertical line return [intercept, -1.0, intercept, 1.0] } else { return [-1.0, GLfloat(slope * -1.0 + intercept), 1.0, GLfloat(slope * 1.0 + intercept)] } case .segment(let p1, let p2): - return [p1.x, p1.y, p2.x, p2.y].map {GLfloat($0)} + return [p1.x, p1.y, p2.x, p2.y].map { GLfloat($0) } } } } public class LineGenerator: ImageGenerator { - public var lineColor:Color = Color.green { didSet { uniformSettings["lineColor"] = lineColor } } - public var lineWidth:Float = 1.0 { + public var lineColor = Color.green { didSet { uniformSettings["lineColor"] = lineColor } } + public var lineWidth: Float = 1.0 { didSet { lineShader.use() glLineWidth(lineWidth) } } - let lineShader:ShaderProgram + let lineShader: ShaderProgram var uniformSettings = ShaderUniformSettings() - public override init(size:Size) { - lineShader = crashOnShaderCompileFailure("LineGenerator"){try sharedImageProcessingContext.programForVertexShader(LineVertexShader, fragmentShader:LineFragmentShader)} - super.init(size:size) + public override init(size: Size) { + lineShader = crashOnShaderCompileFailure("LineGenerator") { try sharedImageProcessingContext.programForVertexShader(LineVertexShader, fragmentShader: LineFragmentShader) } + super.init(size: size) - ({lineWidth = 1.0})() - ({lineColor = Color.red})() + ({ lineWidth = 1.0 })() + ({ lineColor = Color.red })() } - public func renderLines(_ 
lines:[Line]) { + public func renderLines(_ lines: [Line]) { imageFramebuffer.activateFramebufferForRendering() lineShader.use() @@ -60,10 +60,9 @@ public class LineGenerator: ImageGenerator { guard let positionAttribute = lineShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } - let lineEndpoints = lines.flatMap{$0.toGLEndpoints()} + let lineEndpoints = lines.flatMap { $0.toGLEndpoints() } glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, lineEndpoints) - enableAdditiveBlending() glDrawArrays(GLenum(GL_LINES), 0, GLsizei(lines.count) * 2) diff --git a/framework/Source/Operations/LinearBurnBlend.swift b/framework/Source/Operations/LinearBurnBlend.swift index 13a3f93f..8527b04e 100644 --- a/framework/Source/Operations/LinearBurnBlend.swift +++ b/framework/Source/Operations/LinearBurnBlend.swift @@ -1,5 +1,5 @@ public class LinearBurnBlend: BasicOperation { public init() { - super.init(fragmentShader:LinearBurnBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LinearBurnBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LocalBinaryPattern.swift b/framework/Source/Operations/LocalBinaryPattern.swift index ea78f4d6..bb08493f 100644 --- a/framework/Source/Operations/LocalBinaryPattern.swift +++ b/framework/Source/Operations/LocalBinaryPattern.swift @@ -14,6 +14,6 @@ public class LocalBinaryPattern: TextureSamplingOperation { public init() { - super.init(fragmentShader:LocalBinaryPatternFragmentShader) + super.init(fragmentShader: LocalBinaryPatternFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LookupFilter.swift b/framework/Source/Operations/LookupFilter.swift index 72731839..836c9ca1 100755 --- a/framework/Source/Operations/LookupFilter.swift +++ b/framework/Source/Operations/LookupFilter.swift @@ -1,18 +1,18 @@ // PictureInput isn't defined yet 
on Linux, so this operation is inoperable there #if !os(Linux) public class LookupFilter: BasicOperation { - public var intensity:Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } - public var lookupImage:PictureInput? { // TODO: Check for retain cycles in all cases here + public var intensity: Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } + public var lookupImage: PictureInput? { // TODO: Check for retain cycles in all cases here didSet { - lookupImage?.addTarget(self, atTargetIndex:1) + lookupImage?.addTarget(self, atTargetIndex: 1) lookupImage?.processImage() } } public init() { - super.init(fragmentShader:LookupFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LookupFragmentShader, numberOfInputs: 2) - ({intensity = 1.0})() + ({ intensity = 1.0 })() } } diff --git a/framework/Source/Operations/LowPassFilter.swift b/framework/Source/Operations/LowPassFilter.swift index a2de9ed3..1d43ae71 100644 --- a/framework/Source/Operations/LowPassFilter.swift +++ b/framework/Source/Operations/LowPassFilter.swift @@ -8,13 +8,13 @@ public class LowPassFilter: OperationGroup { super.init() buffer.bufferSize = 1 - ({strength = 0.5})() + ({ strength = 0.5 })() - self.configureGroup{input, output in + self.configureGroup {input, output in // This is needed to break the cycle on the very first pass through the blend loop self.dissolveBlend.activatePassthroughOnNextFrame = true // TODO: this may be a retain cycle input --> self.dissolveBlend --> self.buffer --> self.dissolveBlend --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Luminance.swift b/framework/Source/Operations/Luminance.swift index fa5cc730..4126cfc1 100755 --- a/framework/Source/Operations/Luminance.swift +++ b/framework/Source/Operations/Luminance.swift @@ -1,5 +1,5 @@ public class Luminance: BasicOperation { public init() { - super.init(fragmentShader:LuminanceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: 
LuminanceFragmentShader, numberOfInputs: 1) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LuminanceRangeReduction.swift b/framework/Source/Operations/LuminanceRangeReduction.swift index db85cba3..3849ae17 100644 --- a/framework/Source/Operations/LuminanceRangeReduction.swift +++ b/framework/Source/Operations/LuminanceRangeReduction.swift @@ -1,9 +1,9 @@ public class LuminanceRangeReduction: BasicOperation { - public var rangeReductionFactor:Float = 0.6 { didSet { uniformSettings["rangeReduction"] = rangeReductionFactor } } + public var rangeReductionFactor: Float = 0.6 { didSet { uniformSettings["rangeReduction"] = rangeReductionFactor } } public init() { - super.init(fragmentShader:LuminanceRangeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LuminanceRangeFragmentShader, numberOfInputs: 1) - ({rangeReductionFactor = 0.6})() + ({ rangeReductionFactor = 0.6 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LuminanceThreshold.swift b/framework/Source/Operations/LuminanceThreshold.swift index 5b69200e..cc2cfffa 100644 --- a/framework/Source/Operations/LuminanceThreshold.swift +++ b/framework/Source/Operations/LuminanceThreshold.swift @@ -1,9 +1,9 @@ public class LuminanceThreshold: BasicOperation { - public var threshold:Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } + public var threshold: Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:LuminanceThresholdFragmentShader, numberOfInputs:1) + super.init(fragmentShader: LuminanceThresholdFragmentShader, numberOfInputs: 1) - ({threshold = 0.5})() + ({ threshold = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LuminosityBlend.swift b/framework/Source/Operations/LuminosityBlend.swift index d4fca389..50815d48 100644 --- a/framework/Source/Operations/LuminosityBlend.swift +++ b/framework/Source/Operations/LuminosityBlend.swift 
@@ -1,5 +1,5 @@ public class LuminosityBlend: BasicOperation { public init() { - super.init(fragmentShader:LuminosityBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: LuminosityBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MedianFilter.swift b/framework/Source/Operations/MedianFilter.swift index 016371d3..1efa67fb 100644 --- a/framework/Source/Operations/MedianFilter.swift +++ b/framework/Source/Operations/MedianFilter.swift @@ -1,5 +1,5 @@ public class MedianFilter: TextureSamplingOperation { public init() { - super.init(fragmentShader:MedianFragmentShader) + super.init(fragmentShader: MedianFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MissEtikateFilter.swift b/framework/Source/Operations/MissEtikateFilter.swift index 49f38efd..ad1d96f3 100755 --- a/framework/Source/Operations/MissEtikateFilter.swift +++ b/framework/Source/Operations/MissEtikateFilter.swift @@ -11,9 +11,8 @@ public class MissEtikateFilter: LookupFilter { super.init() do { - try ({lookupImage = try PictureInput(imageName:"lookup_miss_etikate.png")})() - } - catch { + try ({ lookupImage = try PictureInput(imageName: "lookup_miss_etikate.png") })() + } catch { print("ERROR: Unable to create PictureInput \(error)") } } diff --git a/framework/Source/Operations/MonochromeFilter.swift b/framework/Source/Operations/MonochromeFilter.swift index d94d3135..71a98437 100644 --- a/framework/Source/Operations/MonochromeFilter.swift +++ b/framework/Source/Operations/MonochromeFilter.swift @@ -1,11 +1,11 @@ public class MonochromeFilter: BasicOperation { - public var intensity:Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } - public var color:Color = Color(red:0.6, green:0.45, blue:0.3, alpha:1.0) { didSet { uniformSettings["filterColor"] = color } } + public var intensity: Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } + public var color 
= Color(red: 0.6, green: 0.45, blue: 0.3, alpha: 1.0) { didSet { uniformSettings["filterColor"] = color } } public init() { - super.init(fragmentShader:MonochromeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: MonochromeFragmentShader, numberOfInputs: 1) - ({intensity = 1.0})() - ({color = Color(red:0.6, green:0.45, blue:0.3, alpha:1.0)})() + ({ intensity = 1.0 })() + ({ color = Color(red: 0.6, green: 0.45, blue: 0.3, alpha: 1.0) })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MotionBlur.swift b/framework/Source/Operations/MotionBlur.swift index 8b3f0607..b5c62bcc 100755 --- a/framework/Source/Operations/MotionBlur.swift +++ b/framework/Source/Operations/MotionBlur.swift @@ -5,19 +5,19 @@ import Glibc import Foundation public class MotionBlur: BasicOperation { - public var blurSize:Float = 2.5 - public var blurAngle:Float = 0.0 + public var blurSize: Float = 2.5 + public var blurAngle: Float = 0.0 public init() { - super.init(vertexShader:MotionBlurVertexShader, fragmentShader:MotionBlurFragmentShader, numberOfInputs:1) + super.init(vertexShader: MotionBlurVertexShader, fragmentShader: MotionBlurFragmentShader, numberOfInputs: 1) } - override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = inputFramebuffer.texelSize(for:outputRotation) + let texelSize = inputFramebuffer.texelSize(for: outputRotation) let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) - let directionalTexelStep:Position + let directionalTexelStep: Position if outputRotation.flipsDimensions() { let xOffset = blurSize * Float(sin(Double(blurAngle) * .pi / 180.0)) * aspectRatio * texelSize.width let yOffset = blurSize * Float(cos(Double(blurAngle) * .pi / 180.0)) * texelSize.width diff --git a/framework/Source/Operations/MotionDetector.swift b/framework/Source/Operations/MotionDetector.swift index 50c64030..11abecfc 100644 --- a/framework/Source/Operations/MotionDetector.swift +++ b/framework/Source/Operations/MotionDetector.swift @@ -1,9 +1,9 @@ public class MotionDetector: OperationGroup { - public var lowPassStrength:Float = 1.0 { didSet {lowPassFilter.strength = lowPassStrength}} - public var motionDetectedCallback:((Position, Float) -> ())? + public var lowPassStrength: Float = 1.0 { didSet { lowPassFilter.strength = lowPassStrength } } + public var motionDetectedCallback: ((Position, Float) -> Void)? 
let lowPassFilter = LowPassFilter() - let motionComparison = BasicOperation(fragmentShader:MotionComparisonFragmentShader, numberOfInputs:2) + let motionComparison = BasicOperation(fragmentShader: MotionComparisonFragmentShader, numberOfInputs: 2) let averageColorExtractor = AverageColorExtractor() public override init() { @@ -13,7 +13,7 @@ public class MotionDetector: OperationGroup { self?.motionDetectedCallback?(Position(color.redComponent / color.alphaComponent, color.greenComponent / color.alphaComponent), color.alphaComponent) } - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.motionComparison --> self.averageColorExtractor --> output input --> self.lowPassFilter --> self.motionComparison } diff --git a/framework/Source/Operations/MultiplyBlend.swift b/framework/Source/Operations/MultiplyBlend.swift index 1ef94331..b8c9e8ff 100644 --- a/framework/Source/Operations/MultiplyBlend.swift +++ b/framework/Source/Operations/MultiplyBlend.swift @@ -1,5 +1,5 @@ public class MultiplyBlend: BasicOperation { public init() { - super.init(fragmentShader:MultiplyBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: MultiplyBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/NobleCornerDetector.swift b/framework/Source/Operations/NobleCornerDetector.swift index 473779f9..d1f63ced 100644 --- a/framework/Source/Operations/NobleCornerDetector.swift +++ b/framework/Source/Operations/NobleCornerDetector.swift @@ -6,6 +6,6 @@ public class NobleCornerDetector: HarrisCornerDetector { public init() { - super.init(fragmentShader:NobleCornerDetectorFragmentShader) + super.init(fragmentShader: NobleCornerDetectorFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/NormalBlend.swift b/framework/Source/Operations/NormalBlend.swift index 9d483917..72efef54 100644 --- a/framework/Source/Operations/NormalBlend.swift +++ 
b/framework/Source/Operations/NormalBlend.swift @@ -1,5 +1,5 @@ public class NormalBlend: BasicOperation { public init() { - super.init(fragmentShader:NormalBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: NormalBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/OpacityAdjustment.swift b/framework/Source/Operations/OpacityAdjustment.swift index 3637d8a5..1da2ce80 100644 --- a/framework/Source/Operations/OpacityAdjustment.swift +++ b/framework/Source/Operations/OpacityAdjustment.swift @@ -1,9 +1,9 @@ public class OpacityAdjustment: BasicOperation { - public var opacity:Float = 0.0 { didSet { uniformSettings["opacity"] = opacity } } + public var opacity: Float = 0.0 { didSet { uniformSettings["opacity"] = opacity } } public init() { - super.init(fragmentShader:OpacityFragmentShader, numberOfInputs:1) + super.init(fragmentShader: OpacityFragmentShader, numberOfInputs: 1) - ({opacity = 0.0})() + ({ opacity = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/OpeningFilter.swift b/framework/Source/Operations/OpeningFilter.swift index 65aec8ae..a8fbde72 100644 --- a/framework/Source/Operations/OpeningFilter.swift +++ b/framework/Source/Operations/OpeningFilter.swift @@ -1,5 +1,5 @@ public class OpeningFilter: OperationGroup { - public var radius:UInt { + public var radius: UInt { didSet { erosion.radius = radius dilation.radius = radius @@ -12,8 +12,8 @@ public class OpeningFilter: OperationGroup { radius = 1 super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.erosion --> self.dilation --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/OverlayBlend.swift b/framework/Source/Operations/OverlayBlend.swift index 460722d9..6387ebe4 100644 --- a/framework/Source/Operations/OverlayBlend.swift +++ b/framework/Source/Operations/OverlayBlend.swift @@ -1,5 +1,5 @@ public 
class OverlayBlend: BasicOperation { public init() { - super.init(fragmentShader:OverlayBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: OverlayBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PinchDistortion.swift b/framework/Source/Operations/PinchDistortion.swift index 4ab7c4b5..05ea47d3 100644 --- a/framework/Source/Operations/PinchDistortion.swift +++ b/framework/Source/Operations/PinchDistortion.swift @@ -1,13 +1,13 @@ public class PinchDistortion: BasicOperation { - public var radius:Float = 1.0 { didSet { uniformSettings["radius"] = radius } } - public var scale:Float = 0.5 { didSet { uniformSettings["scale"] = scale } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 1.0 { didSet { uniformSettings["radius"] = radius } } + public var scale: Float = 0.5 { didSet { uniformSettings["scale"] = scale } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:PinchDistortionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PinchDistortionFragmentShader, numberOfInputs: 1) - ({radius = 1.0})() - ({scale = 0.5})() - ({center = Position.center})() + ({ radius = 1.0 })() + ({ scale = 0.5 })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/Pixellate.swift b/framework/Source/Operations/Pixellate.swift index da283de5..34c3ce64 100644 --- a/framework/Source/Operations/Pixellate.swift +++ b/framework/Source/Operations/Pixellate.swift @@ -1,5 +1,5 @@ public class Pixellate: BasicOperation { - public var fractionalWidthOfAPixel:Float = 0.01 { + public var fractionalWidthOfAPixel: Float = 0.01 { didSet { let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 
2048) uniformSettings["fractionalWidthOfPixel"] = max(fractionalWidthOfAPixel, imageWidth) @@ -7,8 +7,8 @@ public class Pixellate: BasicOperation { } public init() { - super.init(fragmentShader:PixellateFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PixellateFragmentShader, numberOfInputs: 1) - ({fractionalWidthOfAPixel = 0.01})() + ({ fractionalWidthOfAPixel = 0.01 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PolarPixellate.swift b/framework/Source/Operations/PolarPixellate.swift index 230da53a..dfcc1adc 100644 --- a/framework/Source/Operations/PolarPixellate.swift +++ b/framework/Source/Operations/PolarPixellate.swift @@ -1,11 +1,11 @@ public class PolarPixellate: BasicOperation { - public var pixelSize:Size = Size(width:0.05, height:0.05) { didSet { uniformSettings["pixelSize"] = pixelSize } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var pixelSize = Size(width: 0.05, height: 0.05) { didSet { uniformSettings["pixelSize"] = pixelSize } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:PolarPixellateFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PolarPixellateFragmentShader, numberOfInputs: 1) - ({pixelSize = Size(width:0.05, height:0.05)})() - ({center = Position.center})() + ({ pixelSize = Size(width: 0.05, height: 0.05) })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/PolkaDot.swift b/framework/Source/Operations/PolkaDot.swift index 4f020d24..ecae50c7 100644 --- a/framework/Source/Operations/PolkaDot.swift +++ b/framework/Source/Operations/PolkaDot.swift @@ -1,6 +1,6 @@ public class PolkaDot: BasicOperation { - public var dotScaling:Float = 0.90 { didSet { uniformSettings["dotScaling"] = dotScaling } } - public var fractionalWidthOfAPixel:Float = 0.01 { + public var dotScaling: Float = 0.90 { didSet { 
uniformSettings["dotScaling"] = dotScaling } } + public var fractionalWidthOfAPixel: Float = 0.01 { didSet { let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 2048) uniformSettings["fractionalWidthOfPixel"] = max(fractionalWidthOfAPixel, imageWidth) @@ -8,9 +8,9 @@ public class PolkaDot: BasicOperation { } public init() { - super.init(fragmentShader:PolkaDotFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PolkaDotFragmentShader, numberOfInputs: 1) - ({fractionalWidthOfAPixel = 0.01})() - ({dotScaling = 0.90})() + ({ fractionalWidthOfAPixel = 0.01 })() + ({ dotScaling = 0.90 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Posterize.swift b/framework/Source/Operations/Posterize.swift index 501b333f..998b62b9 100644 --- a/framework/Source/Operations/Posterize.swift +++ b/framework/Source/Operations/Posterize.swift @@ -1,9 +1,9 @@ public class Posterize: BasicOperation { - public var colorLevels:Float = 10.0 { didSet { uniformSettings["colorLevels"] = colorLevels } } + public var colorLevels: Float = 10.0 { didSet { uniformSettings["colorLevels"] = colorLevels } } public init() { - super.init(fragmentShader:PosterizeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PosterizeFragmentShader, numberOfInputs: 1) - ({colorLevels = 10.0})() + ({ colorLevels = 10.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PrewittEdgeDetection.swift b/framework/Source/Operations/PrewittEdgeDetection.swift index f42220c6..a859cf9d 100644 --- a/framework/Source/Operations/PrewittEdgeDetection.swift +++ b/framework/Source/Operations/PrewittEdgeDetection.swift @@ -1,9 +1,9 @@ public class PrewittEdgeDetection: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } public init() { - 
super.init(fragmentShader:PrewittEdgeDetectionFragmentShader) + super.init(fragmentShader: PrewittEdgeDetectionFragmentShader) - ({edgeStrength = 1.0})() + ({ edgeStrength = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/RGBAdjustmentFilter.swift b/framework/Source/Operations/RGBAdjustmentFilter.swift index a437a07d..18c3b94b 100644 --- a/framework/Source/Operations/RGBAdjustmentFilter.swift +++ b/framework/Source/Operations/RGBAdjustmentFilter.swift @@ -1,13 +1,13 @@ public class RGBAdjustment: BasicOperation { - public var red:Float = 1.0 { didSet { uniformSettings["redAdjustment"] = red } } - public var blue:Float = 1.0 { didSet { uniformSettings["blueAdjustment"] = blue } } - public var green:Float = 1.0 { didSet { uniformSettings["greenAdjustment"] = green } } + public var red: Float = 1.0 { didSet { uniformSettings["redAdjustment"] = red } } + public var blue: Float = 1.0 { didSet { uniformSettings["blueAdjustment"] = blue } } + public var green: Float = 1.0 { didSet { uniformSettings["greenAdjustment"] = green } } public init() { - super.init(fragmentShader:RGBAdjustmentFragmentShader, numberOfInputs:1) + super.init(fragmentShader: RGBAdjustmentFragmentShader, numberOfInputs: 1) - ({red = 1.0})() - ({blue = 1.0})() - ({green = 1.0})() + ({ red = 1.0 })() + ({ blue = 1.0 })() + ({ green = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ResizeCrop.swift b/framework/Source/Operations/ResizeCrop.swift index d5e8bb2b..7cc945da 100644 --- a/framework/Source/Operations/ResizeCrop.swift +++ b/framework/Source/Operations/ResizeCrop.swift @@ -54,7 +54,7 @@ public func calculateResizeOutput(inputSize: Size, outputSize: Size?, scaleOutpu (inputSize.height - cropSizeInOrigin.height) / 2 / inputSize.height) } else { finalCropSize = inputSize - normalizedOffsetFromOrigin = Position.zero + normalizedOffsetFromOrigin = Position.zero normalizedCropSize = Size(width: 1, height: 1) } @@ -66,11 +66,11 
@@ open class ResizeCrop: BasicOperation { public var cropSizeInPixels: Size? public init() { - super.init(fragmentShader:PassthroughFragmentShader, numberOfInputs:1) + super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1) } override open func renderFrame() { - let inputFramebuffer:Framebuffer = inputFramebuffers[0]! + let inputFramebuffer: Framebuffer = inputFramebuffers[0]! let inputGLSize = inputFramebuffer.sizeForTargetOrientation(.portrait) let inputSize = Size(inputGLSize) @@ -81,11 +81,11 @@ open class ResizeCrop: BasicOperation { size: GLSize(resizeOutputInfo.finalCropSize), stencil: false) - let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:resizeOutputInfo.normalizedOffsetFromOrigin, cropSize:resizeOutputInfo.normalizedCropSize), texture:inputFramebuffer.texture) + let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin: resizeOutputInfo.normalizedOffsetFromOrigin, cropSize: resizeOutputInfo.normalizedCropSize), texture: inputFramebuffer.texture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[textureProperties]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties]) releaseIncomingFramebuffers() } } diff --git a/framework/Source/Operations/SaturationAdjustment.swift b/framework/Source/Operations/SaturationAdjustment.swift index 147867fc..1c30eacc 100755 --- a/framework/Source/Operations/SaturationAdjustment.swift +++ b/framework/Source/Operations/SaturationAdjustment.swift @@ -1,9 +1,9 @@ 
public class SaturationAdjustment: BasicOperation { - public var saturation:Float = 1.0 { didSet { uniformSettings["saturation"] = saturation } } + public var saturation: Float = 1.0 { didSet { uniformSettings["saturation"] = saturation } } public init() { - super.init(fragmentShader:SaturationFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SaturationFragmentShader, numberOfInputs: 1) - ({saturation = 1.0})() + ({ saturation = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SaturationBlend.swift b/framework/Source/Operations/SaturationBlend.swift index df826917..b6d491de 100644 --- a/framework/Source/Operations/SaturationBlend.swift +++ b/framework/Source/Operations/SaturationBlend.swift @@ -1,5 +1,5 @@ public class SaturationBlend: BasicOperation { public init() { - super.init(fragmentShader:SaturationBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SaturationBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ScreenBlend.swift b/framework/Source/Operations/ScreenBlend.swift index 050dbcf4..b892fb9f 100644 --- a/framework/Source/Operations/ScreenBlend.swift +++ b/framework/Source/Operations/ScreenBlend.swift @@ -1,5 +1,5 @@ public class ScreenBlend: BasicOperation { public init() { - super.init(fragmentShader:ScreenBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: ScreenBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SepiaToneFilter.swift b/framework/Source/Operations/SepiaToneFilter.swift index bf30137d..404698f1 100644 --- a/framework/Source/Operations/SepiaToneFilter.swift +++ b/framework/Source/Operations/SepiaToneFilter.swift @@ -2,9 +2,9 @@ public class SepiaToneFilter: ColorMatrixFilter { override public init() { super.init() - ({colorMatrix = Matrix4x4(rowMajorValues:[0.3588, 0.7044, 0.1368, 0.0, + ({colorMatrix = 
Matrix4x4(rowMajorValues: [0.3588, 0.7044, 0.1368, 0.0, 0.2990, 0.5870, 0.1140, 0.0, - 0.2392, 0.4696, 0.0912 ,0.0, + 0.2392, 0.4696, 0.0912, 0.0, 0.0, 0.0, 0.0, 1.0])})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Sharpen.swift b/framework/Source/Operations/Sharpen.swift index 770d579d..24acc6bf 100644 --- a/framework/Source/Operations/Sharpen.swift +++ b/framework/Source/Operations/Sharpen.swift @@ -1,16 +1,16 @@ public class Sharpen: BasicOperation { - public var sharpness:Float = 0.0 { didSet { uniformSettings["sharpness"] = sharpness } } - public var overriddenTexelSize:Size? + public var sharpness: Float = 0.0 { didSet { uniformSettings["sharpness"] = sharpness } } + public var overriddenTexelSize: Size? public init() { - super.init(vertexShader:SharpenVertexShader, fragmentShader:SharpenFragmentShader, numberOfInputs:1) + super.init(vertexShader: SharpenVertexShader, fragmentShader: SharpenFragmentShader, numberOfInputs: 1) - ({sharpness = 0.0})() + ({ sharpness = 0.0 })() } - override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) + let texelSize = overriddenTexelSize ?? 
inputFramebuffer.texelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } diff --git a/framework/Source/Operations/ShiTomasiFeatureDetector.swift b/framework/Source/Operations/ShiTomasiFeatureDetector.swift index 4e4c33b9..f8aff136 100644 --- a/framework/Source/Operations/ShiTomasiFeatureDetector.swift +++ b/framework/Source/Operations/ShiTomasiFeatureDetector.swift @@ -6,6 +6,6 @@ public class ShiTomasiFeatureDetector: HarrisCornerDetector { public init() { - super.init(fragmentShader:ShiTomasiFeatureDetectorFragmentShader) + super.init(fragmentShader: ShiTomasiFeatureDetectorFragmentShader) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SingleComponentGaussianBlur.swift b/framework/Source/Operations/SingleComponentGaussianBlur.swift index 919210ef..33c94c05 100644 --- a/framework/Source/Operations/SingleComponentGaussianBlur.swift +++ b/framework/Source/Operations/SingleComponentGaussianBlur.swift @@ -1,11 +1,11 @@ public class SingleComponentGaussianBlur: TwoStageOperation { - public var blurRadiusInPixels:Float { + public var blurRadiusInPixels: Float { didSet { - let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) + let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit: 8.0, override: overrideDownsamplingOptimization) sharedImageProcessingContext.runOperationAsynchronously { self.downsamplingFactor = downsamplingFactor let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + self.shader = 
crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma)), fragmentShader: fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma: Double(sigma))) } } } } @@ -13,16 +13,16 @@ public class SingleComponentGaussianBlur: TwoStageOperation { public init() { blurRadiusInPixels = 2.0 let pixelRadius = pixelRadiusForBlurSigma(Double(blurRadiusInPixels)) - let initialShader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:2.0), fragmentShader:fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma:2.0))} - super.init(shader:initialShader, numberOfInputs:1) + let initialShader = crashOnShaderCompileFailure("GaussianBlur") { try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma: 2.0), fragmentShader: fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma: 2.0)) } + super.init(shader: initialShader, numberOfInputs: 1) } } -func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { - guard (radius > 0) else { return PassthroughFragmentShader } +func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius: UInt, sigma: Double) -> String { + guard radius > 0 else { return PassthroughFragmentShader } - let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) + let standardWeights = standardGaussianWeightsForRadius(radius, sigma: sigma) let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) let trueNumberOfOptimizedOffsets = radius / 2 + (radius % 2) @@ -45,7 +45,7 @@ func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt } // If the number of required samples exceeds the amount we can 
pass in via varyings, we have to do dependent texture reads in the fragment shader - if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) { + if trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets { #if GLES shaderString += "highp vec2 singleStepOffset = vec2(texelWidth, texelHeight);\n" #else @@ -54,8 +54,8 @@ func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt } for currentOverlowTextureRead in numberOfOptimizedOffsets.. self.gaussianBlur --> self.toonFilter --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SobelEdgeDetection.swift b/framework/Source/Operations/SobelEdgeDetection.swift index 161561f2..1948c1ca 100644 --- a/framework/Source/Operations/SobelEdgeDetection.swift +++ b/framework/Source/Operations/SobelEdgeDetection.swift @@ -1,9 +1,9 @@ public class SobelEdgeDetection: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } public init() { - super.init(fragmentShader:SobelEdgeDetectionFragmentShader) + super.init(fragmentShader: SobelEdgeDetectionFragmentShader) - ({edgeStrength = 1.0})() + ({ edgeStrength = 1.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SoftElegance.swift b/framework/Source/Operations/SoftElegance.swift index 8cec3af0..584ec786 100755 --- a/framework/Source/Operations/SoftElegance.swift +++ b/framework/Source/Operations/SoftElegance.swift @@ -8,12 +8,11 @@ public class SoftElegance: OperationGroup { public override init() { super.init() - self.configureGroup{input, output in + self.configureGroup {input, output in do { - self.lookup1.lookupImage = try PictureInput(imageName:"lookup_soft_elegance_1.png") - self.lookup2.lookupImage = try PictureInput(imageName:"lookup_soft_elegance_2.png") - } - catch { + self.lookup1.lookupImage = try 
PictureInput(imageName: "lookup_soft_elegance_1.png") + self.lookup2.lookupImage = try PictureInput(imageName: "lookup_soft_elegance_2.png") + } catch { print("ERROR: Unable to create PictureInput \(error)") } self.gaussianBlur.blurRadiusInPixels = 10.0 diff --git a/framework/Source/Operations/SoftLightBlend.swift b/framework/Source/Operations/SoftLightBlend.swift index 365f0587..e33c4a7b 100644 --- a/framework/Source/Operations/SoftLightBlend.swift +++ b/framework/Source/Operations/SoftLightBlend.swift @@ -1,5 +1,5 @@ public class SoftLightBlend: BasicOperation { public init() { - super.init(fragmentShader:SoftLightBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SoftLightBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Solarize.swift b/framework/Source/Operations/Solarize.swift index 94a0999d..773c268d 100644 --- a/framework/Source/Operations/Solarize.swift +++ b/framework/Source/Operations/Solarize.swift @@ -1,9 +1,9 @@ public class Solarize: BasicOperation { - public var threshold:Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } + public var threshold: Float = 0.5 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:SolarizeFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SolarizeFragmentShader, numberOfInputs: 1) - ({threshold = 0.5})() + ({ threshold = 0.5 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SolidColorGenerator.swift b/framework/Source/Operations/SolidColorGenerator.swift index 67facd28..c9a55c34 100644 --- a/framework/Source/Operations/SolidColorGenerator.swift +++ b/framework/Source/Operations/SolidColorGenerator.swift @@ -1,6 +1,5 @@ public class SolidColorGenerator: ImageGenerator { - - public func renderColor(_ color:Color) { + public func renderColor(_ color: Color) { imageFramebuffer.activateFramebufferForRendering() 
clearFramebufferWithColor(color) diff --git a/framework/Source/Operations/SourceOverBlend.swift b/framework/Source/Operations/SourceOverBlend.swift index 636b8f87..e7c0945b 100644 --- a/framework/Source/Operations/SourceOverBlend.swift +++ b/framework/Source/Operations/SourceOverBlend.swift @@ -1,5 +1,5 @@ public class SourceOverBlend: BasicOperation { public init() { - super.init(fragmentShader:SourceOverBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SourceOverBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SphereRefraction.swift b/framework/Source/Operations/SphereRefraction.swift index 617e142b..9d0dffd4 100644 --- a/framework/Source/Operations/SphereRefraction.swift +++ b/framework/Source/Operations/SphereRefraction.swift @@ -1,15 +1,15 @@ public class SphereRefraction: BasicOperation { - public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } - public var refractiveIndex:Float = 0.71 { didSet { uniformSettings["refractiveIndex"] = refractiveIndex } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 0.25 { didSet { uniformSettings["radius"] = radius } } + public var refractiveIndex: Float = 0.71 { didSet { uniformSettings["refractiveIndex"] = refractiveIndex } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:SphereRefractionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SphereRefractionFragmentShader, numberOfInputs: 1) - ({radius = 0.25})() - ({refractiveIndex = 0.71})() - ({center = Position.center})() + ({ radius = 0.25 })() + ({ refractiveIndex = 0.71 })() + ({ center = Position.center })() - self.backgroundColor = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) + self.backgroundColor = Color(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0) } } diff --git 
a/framework/Source/Operations/StretchDistortion.swift b/framework/Source/Operations/StretchDistortion.swift index 4e3a4a93..d61a1bee 100644 --- a/framework/Source/Operations/StretchDistortion.swift +++ b/framework/Source/Operations/StretchDistortion.swift @@ -1,9 +1,9 @@ public class StretchDistortion: BasicOperation { - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:StretchDistortionFragmentShader, numberOfInputs:1) + super.init(fragmentShader: StretchDistortionFragmentShader, numberOfInputs: 1) - ({center = Position.center})() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/SubtractBlend.swift b/framework/Source/Operations/SubtractBlend.swift index 556a4e1f..7adac75d 100644 --- a/framework/Source/Operations/SubtractBlend.swift +++ b/framework/Source/Operations/SubtractBlend.swift @@ -1,5 +1,5 @@ public class SubtractBlend: BasicOperation { public init() { - super.init(fragmentShader:SubtractBlendFragmentShader, numberOfInputs:2) + super.init(fragmentShader: SubtractBlendFragmentShader, numberOfInputs: 2) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SwirlDistortion.swift b/framework/Source/Operations/SwirlDistortion.swift index aea785a9..f9c5ae66 100644 --- a/framework/Source/Operations/SwirlDistortion.swift +++ b/framework/Source/Operations/SwirlDistortion.swift @@ -1,13 +1,13 @@ public class SwirlDistortion: BasicOperation { - public var radius:Float = 0.5 { didSet { uniformSettings["radius"] = radius } } - public var angle:Float = 1.0 { didSet { uniformSettings["angle"] = angle } } - public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } + public var radius: Float = 0.5 { didSet { uniformSettings["radius"] = radius } } + public var angle: Float = 1.0 { didSet { 
uniformSettings["angle"] = angle } } + public var center = Position.center { didSet { uniformSettings["center"] = center } } public init() { - super.init(fragmentShader:SwirlFragmentShader, numberOfInputs:1) + super.init(fragmentShader: SwirlFragmentShader, numberOfInputs: 1) - ({radius = 0.5})() - ({angle = 1.0})() - ({center = Position.center})() + ({ radius = 0.5 })() + ({ angle = 1.0 })() + ({ center = Position.center })() } } diff --git a/framework/Source/Operations/ThresholdSketch.swift b/framework/Source/Operations/ThresholdSketch.swift index 0df3afe7..fff109c1 100644 --- a/framework/Source/Operations/ThresholdSketch.swift +++ b/framework/Source/Operations/ThresholdSketch.swift @@ -1,11 +1,11 @@ public class ThresholdSketchFilter: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } - public var threshold:Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } + public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var threshold: Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:ThresholdSketchFragmentShader) + super.init(fragmentShader: ThresholdSketchFragmentShader) - ({edgeStrength = 1.0})() - ({threshold = 0.25})() + ({ edgeStrength = 1.0 })() + ({ threshold = 0.25 })() } } diff --git a/framework/Source/Operations/ThresholdSobelEdgeDetection.swift b/framework/Source/Operations/ThresholdSobelEdgeDetection.swift index 7225b4c9..f16462e6 100644 --- a/framework/Source/Operations/ThresholdSobelEdgeDetection.swift +++ b/framework/Source/Operations/ThresholdSobelEdgeDetection.swift @@ -1,11 +1,11 @@ public class ThresholdSobelEdgeDetection: TextureSamplingOperation { - public var edgeStrength:Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } - public var threshold:Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } + 
public var edgeStrength: Float = 1.0 { didSet { uniformSettings["edgeStrength"] = edgeStrength } } + public var threshold: Float = 0.25 { didSet { uniformSettings["threshold"] = threshold } } public init() { - super.init(fragmentShader:ThresholdEdgeDetectionFragmentShader) + super.init(fragmentShader: ThresholdEdgeDetectionFragmentShader) - ({edgeStrength = 1.0})() - ({threshold = 0.25})() + ({ edgeStrength = 1.0 })() + ({ threshold = 0.25 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/TiltShift.swift b/framework/Source/Operations/TiltShift.swift index 966d7c74..0db0b216 100644 --- a/framework/Source/Operations/TiltShift.swift +++ b/framework/Source/Operations/TiltShift.swift @@ -1,23 +1,23 @@ public class TiltShift: OperationGroup { - public var blurRadiusInPixels:Float = 7.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } - public var topFocusLevel:Float = 0.4 { didSet { tiltShift.uniformSettings["topFocusLevel"] = topFocusLevel } } - public var bottomFocusLevel:Float = 0.6 { didSet { tiltShift.uniformSettings["bottomFocusLevel"] = bottomFocusLevel } } - public var focusFallOffRate:Float = 0.2 { didSet { tiltShift.uniformSettings["focusFallOffRate"] = focusFallOffRate } } + public var blurRadiusInPixels: Float = 7.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var topFocusLevel: Float = 0.4 { didSet { tiltShift.uniformSettings["topFocusLevel"] = topFocusLevel } } + public var bottomFocusLevel: Float = 0.6 { didSet { tiltShift.uniformSettings["bottomFocusLevel"] = bottomFocusLevel } } + public var focusFallOffRate: Float = 0.2 { didSet { tiltShift.uniformSettings["focusFallOffRate"] = focusFallOffRate } } let gaussianBlur = GaussianBlur() - let tiltShift = BasicOperation(fragmentShader:TiltShiftFragmentShader, numberOfInputs:2) + let tiltShift = BasicOperation(fragmentShader: TiltShiftFragmentShader, numberOfInputs: 2) public override init() { super.init() - ({blurRadiusInPixels 
= 7.0})() - ({topFocusLevel = 0.4})() - ({bottomFocusLevel = 0.6})() - ({focusFallOffRate = 0.2})() + ({ blurRadiusInPixels = 7.0 })() + ({ topFocusLevel = 0.4 })() + ({ bottomFocusLevel = 0.6 })() + ({ focusFallOffRate = 0.2 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.tiltShift --> output input --> self.gaussianBlur --> self.tiltShift } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ToonFilter.swift b/framework/Source/Operations/ToonFilter.swift index 091067dd..c5a0a9a2 100644 --- a/framework/Source/Operations/ToonFilter.swift +++ b/framework/Source/Operations/ToonFilter.swift @@ -1,11 +1,11 @@ public class ToonFilter: TextureSamplingOperation { - public var threshold:Float = 0.2 { didSet { uniformSettings["threshold"] = threshold } } - public var quantizationLevels:Float = 10.0 { didSet { uniformSettings["quantizationLevels"] = quantizationLevels } } + public var threshold: Float = 0.2 { didSet { uniformSettings["threshold"] = threshold } } + public var quantizationLevels: Float = 10.0 { didSet { uniformSettings["quantizationLevels"] = quantizationLevels } } public init() { - super.init(fragmentShader:ToonFragmentShader) + super.init(fragmentShader: ToonFragmentShader) - ({threshold = 0.2})() - ({quantizationLevels = 10.0})() + ({ threshold = 0.2 })() + ({ quantizationLevels = 10.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index 1dcc73f8..b3a58cc0 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -13,43 +13,40 @@ #endif open class TransformOperation: BasicOperation { - public var transform:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } + public var transform = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } 
public var anchorTopLeft = false public var ignoreAspectRatio = false - var normalizedImageVertices:[GLfloat]! + var normalizedImageVertices: [GLfloat]! public init() { - super.init(vertexShader:TransformVertexShader, fragmentShader:PassthroughFragmentShader, numberOfInputs:1) + super.init(vertexShader: TransformVertexShader, fragmentShader: PassthroughFragmentShader, numberOfInputs: 1) - ({transform = Matrix4x4.identity})() + ({ transform = Matrix4x4.identity })() } - override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:normalizedImageVertices, inputTextures:textureProperties) + override func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertices: normalizedImageVertices, inputTextures: textureProperties) releaseIncomingFramebuffers() } - override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) var aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) - if(ignoreAspectRatio) { + if ignoreAspectRatio { aspectRatio = 1 } - let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0, anchorTopLeft:anchorTopLeft) + let orthoMatrix = orthographicMatrix(-1.0, right: 1.0, bottom: -1.0 * aspectRatio, top: 1.0 * aspectRatio, near: -1.0, far: 1.0, anchorTopLeft: anchorTopLeft) normalizedImageVertices = normalizedImageVerticesForAspectRatio(aspectRatio) uniformSettings["orthographicMatrix"] = orthoMatrix } - func normalizedImageVerticesForAspectRatio(_ aspectRatio:Float) -> [GLfloat] { + func normalizedImageVerticesForAspectRatio(_ aspectRatio: Float) -> [GLfloat] { // [TopLeft.x, TopLeft.y, TopRight.x, TopRight.y, BottomLeft.x, BottomLeft.y, BottomRight.x, BottomRight.y] - if(anchorTopLeft) { - return [0.0, 0.0, 1.0, 0.0, 0.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] - } - else { - return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + if anchorTopLeft { + return [0.0, 0.0, 1.0, 0.0, 0.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] + } else { + return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)] } } } - - diff --git a/framework/Source/Operations/UnsharpMask.swift b/framework/Source/Operations/UnsharpMask.swift index ecda7dcf..2d1793aa 100644 --- a/framework/Source/Operations/UnsharpMask.swift +++ b/framework/Source/Operations/UnsharpMask.swift @@ -3,17 +3,17 @@ public class UnsharpMask: OperationGroup { public var intensity: Float = 1.0 { didSet { unsharpMask.uniformSettings["intensity"] = intensity } } let gaussianBlur = GaussianBlur() - let unsharpMask = BasicOperation(fragmentShader:UnsharpMaskFragmentShader, numberOfInputs:2) + let unsharpMask = 
BasicOperation(fragmentShader: UnsharpMaskFragmentShader, numberOfInputs: 2) public override init() { blurRadiusInPixels = 4.0 super.init() - ({intensity = 1.0})() + ({ intensity = 1.0 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.unsharpMask input --> self.gaussianBlur --> self.unsharpMask --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Vibrance.swift b/framework/Source/Operations/Vibrance.swift index 8b7671d9..13ead009 100644 --- a/framework/Source/Operations/Vibrance.swift +++ b/framework/Source/Operations/Vibrance.swift @@ -1,9 +1,9 @@ public class Vibrance: BasicOperation { - public var vibrance:Float = 0.0 { didSet { uniformSettings["vibrance"] = vibrance } } + public var vibrance: Float = 0.0 { didSet { uniformSettings["vibrance"] = vibrance } } public init() { - super.init(fragmentShader:VibranceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: VibranceFragmentShader, numberOfInputs: 1) - ({vibrance = 0.0})() + ({ vibrance = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Vignette.swift b/framework/Source/Operations/Vignette.swift index c027a544..1b2afa11 100644 --- a/framework/Source/Operations/Vignette.swift +++ b/framework/Source/Operations/Vignette.swift @@ -1,15 +1,15 @@ public class Vignette: BasicOperation { - public var center:Position = Position.center { didSet { uniformSettings["vignetteCenter"] = center } } - public var color:Color = Color.black { didSet { uniformSettings["vignetteColor"] = color } } - public var start:Float = 0.3 { didSet { uniformSettings["vignetteStart"] = start } } - public var end:Float = 0.75 { didSet { uniformSettings["vignetteEnd"] = end } } + public var center = Position.center { didSet { uniformSettings["vignetteCenter"] = center } } + public var color = Color.black { didSet { uniformSettings["vignetteColor"] = color } } + public var start: Float = 0.3 { didSet { 
uniformSettings["vignetteStart"] = start } } + public var end: Float = 0.75 { didSet { uniformSettings["vignetteEnd"] = end } } public init() { - super.init(fragmentShader:VignetteFragmentShader, numberOfInputs:1) + super.init(fragmentShader: VignetteFragmentShader, numberOfInputs: 1) - ({center = Position.center})() - ({color = Color.black})() - ({start = 0.3})() - ({end = 0.75})() + ({ center = Position.center })() + ({ color = Color.black })() + ({ start = 0.3 })() + ({ end = 0.75 })() } } diff --git a/framework/Source/Operations/WhiteBalance.swift b/framework/Source/Operations/WhiteBalance.swift index 5335d495..1d205a46 100644 --- a/framework/Source/Operations/WhiteBalance.swift +++ b/framework/Source/Operations/WhiteBalance.swift @@ -1,11 +1,11 @@ public class WhiteBalance: BasicOperation { - public var temperature:Float = 5000.0 { didSet { uniformSettings["temperature"] = temperature < 5000.0 ? 0.0004 * (temperature - 5000.0) : 0.00006 * (temperature - 5000.0) } } - public var tint:Float = 0.0 { didSet { uniformSettings["tint"] = tint / 100.0 } } + public var temperature: Float = 5000.0 { didSet { uniformSettings["temperature"] = temperature < 5000.0 ? 
0.0004 * (temperature - 5000.0) : 0.00006 * (temperature - 5000.0) } } + public var tint: Float = 0.0 { didSet { uniformSettings["tint"] = tint / 100.0 } } public init() { - super.init(fragmentShader:WhiteBalanceFragmentShader, numberOfInputs:1) + super.init(fragmentShader: WhiteBalanceFragmentShader, numberOfInputs: 1) - ({temperature = 5000.0})() - ({tint = 0.0})() + ({ temperature = 5000.0 })() + ({ tint = 0.0 })() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ZoomBlur.swift b/framework/Source/Operations/ZoomBlur.swift index 627fe060..d77789fc 100644 --- a/framework/Source/Operations/ZoomBlur.swift +++ b/framework/Source/Operations/ZoomBlur.swift @@ -1,11 +1,11 @@ public class ZoomBlur: BasicOperation { - public var blurSize:Float = 1.0 { didSet { uniformSettings["blurSize"] = blurSize } } - public var blurCenter:Position = Position.center { didSet { uniformSettings["blurCenter"] = blurCenter } } + public var blurSize: Float = 1.0 { didSet { uniformSettings["blurSize"] = blurSize } } + public var blurCenter = Position.center { didSet { uniformSettings["blurCenter"] = blurCenter } } public init() { - super.init(fragmentShader:ZoomBlurFragmentShader, numberOfInputs:1) + super.init(fragmentShader: ZoomBlurFragmentShader, numberOfInputs: 1) - ({blurSize = 1.0})() - ({blurCenter = Position.center})() + ({ blurSize = 1.0 })() + ({ blurCenter = Position.center })() } } diff --git a/framework/Source/Operations/iOSBlur.swift b/framework/Source/Operations/iOSBlur.swift index a5478078..707a2b52 100644 --- a/framework/Source/Operations/iOSBlur.swift +++ b/framework/Source/Operations/iOSBlur.swift @@ -1,7 +1,7 @@ public class iOSBlur: OperationGroup { - public var blurRadiusInPixels:Float = 48.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } - public var saturation:Float = 0.8 { didSet { saturationFilter.saturation = saturation } } - public var rangeReductionFactor:Float = 0.6 { didSet { luminanceRange.rangeReductionFactor 
= rangeReductionFactor } } + public var blurRadiusInPixels: Float = 48.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } + public var saturation: Float = 0.8 { didSet { saturationFilter.saturation = saturation } } + public var rangeReductionFactor: Float = 0.6 { didSet { luminanceRange.rangeReductionFactor = rangeReductionFactor } } let saturationFilter = SaturationAdjustment() let gaussianBlur = GaussianBlur() @@ -10,12 +10,12 @@ public class iOSBlur: OperationGroup { public override init() { super.init() - ({blurRadiusInPixels = 48.0})() - ({saturation = 0.8})() - ({rangeReductionFactor = 0.6})() + ({ blurRadiusInPixels = 48.0 })() + ({ saturation = 0.8 })() + ({ rangeReductionFactor = 0.6 })() - self.configureGroup{input, output in + self.configureGroup {input, output in input --> self.saturationFilter --> self.gaussianBlur --> self.luminanceRange --> output } } -} \ No newline at end of file +} diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index ff27ae3f..12b364e0 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -6,27 +6,27 @@ public var _needCheckFilterContainerThread: Bool? 
public protocol ImageSource: AnyObject { var _needCheckSourceThread: Bool { get } - var targets:TargetContainer { get } - func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) + var targets: TargetContainer { get } + func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) } public protocol ImageConsumer: AnyObject { var _needCheckConsumerThread: Bool { get } - var maximumInputs:UInt { get } - var sources:SourceContainer { get } + var maximumInputs: UInt { get } + var sources: SourceContainer { get } - func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) + func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) } public protocol ImageProcessingOperation: ImageConsumer, ImageSource { } infix operator --> : AdditionPrecedence -//precedencegroup ProcessingOperationPrecedence { +// precedencegroup ProcessingOperationPrecedence { // associativity: left //// higherThan: Multiplicative -//} -@discardableResult public func -->(source:ImageSource, destination:T) -> T { +// } +@discardableResult public func -->(source: ImageSource, destination: T) -> T { source.addTarget(destination) return destination } @@ -39,20 +39,20 @@ public extension ImageSource { return _needCheckFilterContainerThread ?? true } - func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? = nil) { + func addTarget(_ target: ImageConsumer, atTargetIndex: UInt? 
= nil) { if _needCheckSourceThread { __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) } if let targetIndex = atTargetIndex { - target.setSource(self, atIndex:targetIndex) - targets.append(target, indexAtTarget:targetIndex) + target.setSource(self, atIndex: targetIndex) + targets.append(target, indexAtTarget: targetIndex) sharedImageProcessingContext.runOperationAsynchronously { - self.transmitPreviousImage(to:target, atIndex:targetIndex) + self.transmitPreviousImage(to: target, atIndex: targetIndex) } } else if let indexAtTarget = target.addSource(self) { - targets.append(target, indexAtTarget:indexAtTarget) + targets.append(target, indexAtTarget: indexAtTarget) sharedImageProcessingContext.runOperationAsynchronously { - self.transmitPreviousImage(to:target, atIndex:indexAtTarget) + self.transmitPreviousImage(to: target, atIndex: indexAtTarget) } } else { debugPrint("Warning: tried to add target beyond target's input capacity") @@ -69,19 +69,19 @@ public extension ImageSource { targets.removeAll() } - func remove(_ target:ImageConsumer) { + func remove(_ target: ImageConsumer) { if _needCheckSourceThread { __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) } for (testTarget, index) in targets { - if(target === testTarget) { + if target === testTarget { target.removeSourceAtIndex(index) targets.remove(target) } } } - func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) { + func updateTargetsWithFramebuffer(_ framebuffer: Framebuffer) { var foundTargets = [(ImageConsumer, UInt)]() for target in targets { foundTargets.append(target) @@ -97,7 +97,7 @@ public extension ImageSource { } } for (target, index) in foundTargets { - target.newFramebufferAvailable(framebuffer, fromSourceIndex:index) + target.newFramebufferAvailable(framebuffer, fromSourceIndex: index) } } } @@ -107,21 +107,21 @@ public extension ImageConsumer { return _needCheckFilterContainerThread ?? true } - func addSource(_ source:ImageSource) -> UInt? 
{ + func addSource(_ source: ImageSource) -> UInt? { if _needCheckConsumerThread { __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) } - return sources.append(source, maximumInputs:maximumInputs) + return sources.append(source, maximumInputs: maximumInputs) } - func setSource(_ source:ImageSource, atIndex:UInt) { + func setSource(_ source: ImageSource, atIndex: UInt) { if _needCheckConsumerThread { __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) } - _ = sources.insert(source, atIndex:atIndex, maximumInputs:maximumInputs) + _ = sources.insert(source, atIndex: atIndex, maximumInputs: maximumInputs) } - func removeSourceAtIndex(_ index:UInt) { + func removeSourceAtIndex(_ index: UInt) { if _needCheckConsumerThread { __dispatch_assert_queue(sharedImageProcessingContext.serialDispatchQueue) } @@ -151,33 +151,33 @@ public extension ImageConsumer { } class WeakImageConsumer { - weak var value:ImageConsumer? - let indexAtTarget:UInt - init (value:ImageConsumer, indexAtTarget:UInt) { + weak var value: ImageConsumer? 
+ let indexAtTarget: UInt + init (value: ImageConsumer, indexAtTarget: UInt) { self.indexAtTarget = indexAtTarget self.value = value } } -public class TargetContainer:Sequence { +public class TargetContainer: Sequence { private var targets = [WeakImageConsumer]() - public var count:Int { get { return targets.count } } + public var count: Int { get { return targets.count } } #if !os(Linux) - let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) + let dispatchQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) #endif public init() { } - public func append(_ target:ImageConsumer, indexAtTarget:UInt) { + public func append(_ target: ImageConsumer, indexAtTarget: UInt) { // TODO: Don't allow the addition of a target more than once #if os(Linux) - self.targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget)) + self.targets.append(WeakImageConsumer(value: target, indexAtTarget: indexAtTarget)) #else - dispatchQueue.async{ - self.targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget)) + dispatchQueue.async { + self.targets.append(WeakImageConsumer(value: target, indexAtTarget: indexAtTarget)) } #endif } @@ -187,15 +187,15 @@ public class TargetContainer:Sequence { return AnyIterator { () -> (ImageConsumer, UInt)? 
in #if os(Linux) - if (index >= self.targets.count) { + if index >= self.targets.count { return nil } // NOTE: strong retain value, in case the value is released on another thread var retainedValue = self.targets[index].value while retainedValue == nil { - self.targets.remove(at:index) - if (index >= self.targets.count) { + self.targets.remove(at: index) + if index >= self.targets.count { return nil } retainedValue = self.targets[index].value @@ -204,16 +204,16 @@ public class TargetContainer:Sequence { index += 1 return (retainedValue!, self.targets[index - 1].indexAtTarget) #else - return self.dispatchQueue.sync{ - if (index >= self.targets.count) { + return self.dispatchQueue.sync { + if index >= self.targets.count { return nil } // NOTE: strong retain value, in case the value is released on another thread var retainedValue = self.targets[index].value while retainedValue == nil { - self.targets.remove(at:index) - if (index >= self.targets.count) { + self.targets.remove(at: index) + if index >= self.targets.count { return nil } retainedValue = self.targets[index].value @@ -221,7 +221,7 @@ public class TargetContainer:Sequence { index += 1 return (retainedValue!, self.targets[index - 1].indexAtTarget) - } + } #endif } } @@ -230,17 +230,17 @@ public class TargetContainer:Sequence { #if os(Linux) self.targets.removeAll() #else - dispatchQueue.async{ + dispatchQueue.async { self.targets.removeAll() } #endif } - public func remove(_ target:ImageConsumer) { + public func remove(_ target: ImageConsumer) { #if os(Linux) self.targets = self.targets.filter { $0.value !== target } #else - dispatchQueue.async{ + dispatchQueue.async { self.targets = self.targets.filter { $0.value !== target } } #endif @@ -248,15 +248,15 @@ public class TargetContainer:Sequence { } public class SourceContainer { - public var sources:[UInt:ImageSource] = [:] + public var sources: [UInt: ImageSource] = [:] public init() { } - public func append(_ source:ImageSource, maximumInputs:UInt) -> UInt? 
{ - var currentIndex:UInt = 0 + public func append(_ source: ImageSource, maximumInputs: UInt) -> UInt? { + var currentIndex: UInt = 0 while currentIndex < maximumInputs { - if (sources[currentIndex] == nil) { + if sources[currentIndex] == nil { sources[currentIndex] = source return currentIndex } @@ -266,50 +266,50 @@ public class SourceContainer { return nil } - public func insert(_ source:ImageSource, atIndex:UInt, maximumInputs:UInt) -> UInt { - guard (atIndex < maximumInputs) else { fatalError("ERROR: Attempted to set a source beyond the maximum number of inputs on this operation") } + public func insert(_ source: ImageSource, atIndex: UInt, maximumInputs: UInt) -> UInt { + guard atIndex < maximumInputs else { fatalError("ERROR: Attempted to set a source beyond the maximum number of inputs on this operation") } sources[atIndex] = source return atIndex } - public func removeAtIndex(_ index:UInt) { + public func removeAtIndex(_ index: UInt) { sources[index] = nil } } public class ImageRelay: ImageProcessingOperation { - public var newImageCallback:((Framebuffer) -> ())? + public var newImageCallback: ((Framebuffer) -> Void)? 
public let sources = SourceContainer() public let targets = TargetContainer() - public let maximumInputs:UInt = 1 - public var preventRelay:Bool = false + public let maximumInputs: UInt = 1 + public var preventRelay: Bool = false public init() { } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { guard sources.sources.count > 0 else { return } - sources.sources[0]?.transmitPreviousImage(to:self, atIndex:0) + sources.sources[0]?.transmitPreviousImage(to: self, atIndex: 0) } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if let newImageCallback = newImageCallback { newImageCallback(framebuffer) } - if (!preventRelay) { + if !preventRelay { relayFramebufferOnward(framebuffer) } } - public func relayFramebufferOnward(_ framebuffer:Framebuffer) { + public func relayFramebufferOnward(_ framebuffer: Framebuffer) { // Need to override to guarantee a removal of the previously applied lock for _ in targets { framebuffer.lock() } framebuffer.unlock() for (target, index) in targets { - target.newFramebufferAvailable(framebuffer, fromSourceIndex:index) + target.newFramebufferAvailable(framebuffer, fromSourceIndex: index) } } } diff --git a/framework/Source/Position.swift b/framework/Source/Position.swift index 1b640783..778354e3 100644 --- a/framework/Source/Position.swift +++ b/framework/Source/Position.swift @@ -5,18 +5,18 @@ import UIKit #endif public struct Position { - public let x:Float - public let y:Float - public let z:Float? + public let x: Float + public let y: Float + public let z: Float? - public init (_ x:Float, _ y:Float, _ z:Float? = nil) { + public init (_ x: Float, _ y: Float, _ z: Float? 
= nil) { self.x = x self.y = y self.z = z } #if !os(Linux) - public init(point:CGPoint) { + public init(point: CGPoint) { self.x = Float(point.x) self.y = Float(point.y) self.z = nil diff --git a/framework/Source/RawDataInput.swift b/framework/Source/RawDataInput.swift index c918045f..a61dbba9 100644 --- a/framework/Source/RawDataInput.swift +++ b/framework/Source/RawDataInput.swift @@ -33,11 +33,10 @@ public class RawDataInput: ImageSource { public let targets = TargetContainer() public init() { - } - public func uploadBytes(_ bytes:[UInt8], size:Size, pixelFormat:PixelFormat, orientation:ImageOrientation = .portrait) { - let dataFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:orientation, size:GLSize(size), textureOnly:true, internalFormat:pixelFormat.toGL(), format:pixelFormat.toGL()) + public func uploadBytes(_ bytes: [UInt8], size: Size, pixelFormat: PixelFormat, orientation: ImageOrientation = .portrait) { + let dataFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: orientation, size: GLSize(size), textureOnly: true, internalFormat: pixelFormat.toGL(), format: pixelFormat.toGL()) glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), dataFramebuffer.texture) @@ -46,7 +45,7 @@ public class RawDataInput: ImageSource { updateTargetsWithFramebuffer(dataFramebuffer) } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // TODO: Determine if this is necessary for the raw data uploads } } diff --git a/framework/Source/RawDataOutput.swift b/framework/Source/RawDataOutput.swift index dac09d7b..e2fb1290 100644 --- a/framework/Source/RawDataOutput.swift +++ b/framework/Source/RawDataOutput.swift @@ -13,25 +13,25 @@ #endif public class RawDataOutput: ImageConsumer { - public var dataAvailableCallback:(([UInt8]) -> ())? 
+ public var dataAvailableCallback: (([UInt8]) -> Void)? public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 public init() { } // TODO: Replace with texture caches - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() - var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) + var data = [UInt8](repeating: 0, count: Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer.unlock() diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 3b0bd43d..c408fbd0 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -8,7 +8,7 @@ protocol SerialDispatch { } extension 
SerialDispatch { - func runOperationAsynchronously(operation:() -> ()) { + func runOperationAsynchronously(operation:() -> Void) { operation() } @@ -19,7 +19,7 @@ extension SerialDispatch { #else -public var standardProcessingQueue:DispatchQueue { +public var standardProcessingQueue: DispatchQueue { if #available(iOS 10, OSX 10.10, *) { return DispatchQueue.global(qos: .default) } else { @@ -27,7 +27,7 @@ public var standardProcessingQueue:DispatchQueue { } } -public var lowProcessingQueue:DispatchQueue { +public var lowProcessingQueue: DispatchQueue { if #available(iOS 10, OSX 10.10, *) { return DispatchQueue.global(qos: .background) } else { @@ -35,19 +35,19 @@ public var lowProcessingQueue:DispatchQueue { } } -func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { - if (Thread.isMainThread) { +func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> Void) { + if Thread.isMainThread { mainThreadOperation() } else { - DispatchQueue.main.async(execute:mainThreadOperation) + DispatchQueue.main.async(execute: mainThreadOperation) } } -func runOnMainQueue(_ mainThreadOperation:() -> ()) { - if (Thread.isMainThread) { +func runOnMainQueue(_ mainThreadOperation:() -> Void) { + if Thread.isMainThread { mainThreadOperation() } else { - DispatchQueue.main.sync(execute:mainThreadOperation) + DispatchQueue.main.sync(execute: mainThreadOperation) } } @@ -63,10 +63,10 @@ func runOnMainQueue(_ mainThreadOperation:() -> T) -> T { // MARK: SerialDispatch extension public protocol SerialDispatch: class { - var executeStartTime:TimeInterval? { get set } - var serialDispatchQueue:DispatchQueue { get } - var dispatchQueueKey:DispatchSpecificKey { get } - var dispatchQueueKeyValue:Int { get } + var executeStartTime: TimeInterval? 
{ get set } + var serialDispatchQueue: DispatchQueue { get } + var dispatchQueueKey: DispatchSpecificKey { get } + var dispatchQueueKeyValue: Int { get } func makeCurrentContext() } @@ -79,7 +79,7 @@ public extension SerialDispatch { } } - func runOperationAsynchronously(_ operation:@escaping () -> ()) { + func runOperationAsynchronously(_ operation:@escaping () -> Void) { self.serialDispatchQueue.async { self.executeStartTime = CACurrentMediaTime() self.makeCurrentContext() @@ -88,9 +88,9 @@ public extension SerialDispatch { } } - func runOperationSynchronously(_ operation:() -> ()) { + func runOperationSynchronously(_ operation:() -> Void) { // TODO: Verify this works as intended - if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == self.dispatchQueueKeyValue) { + if DispatchQueue.getSpecific(key: self.dispatchQueueKey) == self.dispatchQueueKeyValue { operation() } else { self.serialDispatchQueue.sync { @@ -102,8 +102,8 @@ public extension SerialDispatch { } } - func runOperationSynchronously(_ operation:() throws -> ()) throws { - var caughtError:Error? = nil + func runOperationSynchronously(_ operation:() throws -> Void) throws { + var caughtError: Error? runOperationSynchronously { do { try operation() @@ -111,7 +111,7 @@ public extension SerialDispatch { caughtError = error } } - if (caughtError != nil) {throw caughtError!} + if caughtError != nil { throw caughtError! 
} } func runOperationSynchronously(_ operation:() throws -> T) throws -> T { diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 6b3f6dea..ab4be862 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -14,9 +14,8 @@ import Foundation - -struct ShaderCompileError:Error { - let compileLog:String +struct ShaderCompileError: Error { + let compileLog: String } enum ShaderType { @@ -27,23 +26,23 @@ enum ShaderType { public class ShaderProgram { public var colorUniformsUseFourComponents = false public static var disableAttributeCache: Bool = false - let program:GLuint - var vertexShader:GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets - var fragmentShader:GLuint! - private var attributeAddresses = [String:GLuint]() - private var uniformAddresses = [String:GLint]() - private var currentUniformIntValues = [String:GLint]() - private var currentUniformFloatValues = [String:GLfloat]() - private var currentUniformFloatArrayValues = [String:[GLfloat]]() + let program: GLuint + var vertexShader: GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets + var fragmentShader: GLuint! 
+ private var attributeAddresses = [String: GLuint]() + private var uniformAddresses = [String: GLint]() + private var currentUniformIntValues = [String: GLint]() + private var currentUniformFloatValues = [String: GLfloat]() + private var currentUniformFloatArrayValues = [String: [GLfloat]]() // MARK: - // MARK: Initialization and teardown - public init(vertexShader:String, fragmentShader:String) throws { + public init(vertexShader: String, fragmentShader: String) throws { program = glCreateProgram() - self.vertexShader = try compileShader(vertexShader, type:.vertex) - self.fragmentShader = try compileShader(fragmentShader, type:.fragment) + self.vertexShader = try compileShader(vertexShader, type: .vertex) + self.fragmentShader = try compileShader(fragmentShader, type: .fragment) glAttachShader(program, self.vertexShader) glAttachShader(program, self.fragmentShader) @@ -51,21 +50,21 @@ public class ShaderProgram { try link() } - public convenience init(vertexShader:String, fragmentShaderFile:URL) throws { - try self.init(vertexShader:vertexShader, fragmentShader:try shaderFromFile(fragmentShaderFile)) + public convenience init(vertexShader: String, fragmentShaderFile: URL) throws { + try self.init(vertexShader: vertexShader, fragmentShader: try shaderFromFile(fragmentShaderFile)) } - public convenience init(vertexShaderFile:URL, fragmentShaderFile:URL) throws { - try self.init(vertexShader:try shaderFromFile(vertexShaderFile), fragmentShader:try shaderFromFile(fragmentShaderFile)) + public convenience init(vertexShaderFile: URL, fragmentShaderFile: URL) throws { + try self.init(vertexShader: try shaderFromFile(vertexShaderFile), fragmentShader: try shaderFromFile(fragmentShaderFile)) } deinit { - //debugPrint("Shader deallocated") + // debugPrint("Shader deallocated") - if (vertexShader != nil) { + if vertexShader != nil { glDeleteShader(vertexShader) } - if (fragmentShader != nil) { + if fragmentShader != nil { glDeleteShader(fragmentShader) } 
glDeleteProgram(program) @@ -74,16 +73,16 @@ public class ShaderProgram { // MARK: - // MARK: Attributes and uniforms - public func attributeIndex(_ attribute:String) -> GLuint? { + public func attributeIndex(_ attribute: String) -> GLuint? { if let attributeAddress = attributeAddresses[attribute], !ShaderProgram.disableAttributeCache { return attributeAddress } else { - var attributeAddress:GLint = -1 - attribute.withGLChar{glString in + var attributeAddress: GLint = -1 + attribute.withGLChar {glString in attributeAddress = glGetAttribLocation(self.program, glString) } - if (attributeAddress < 0) { + if attributeAddress < 0 { return nil } else { glEnableVertexAttribArray(GLuint(attributeAddress)) @@ -95,16 +94,16 @@ public class ShaderProgram { } } - public func uniformIndex(_ uniform:String) -> GLint? { + public func uniformIndex(_ uniform: String) -> GLint? { if let uniformAddress = uniformAddresses[uniform] { return uniformAddress } else { - var uniformAddress:GLint = -1 - uniform.withGLChar{glString in + var uniformAddress: GLint = -1 + uniform.withGLChar {glString in uniformAddress = glGetUniformLocation(self.program, glString) } - if (uniformAddress < 0) { + if uniformAddress < 0 { return nil } else { uniformAddresses[uniform] = uniformAddress @@ -116,48 +115,48 @@ public class ShaderProgram { // MARK: - // MARK: Uniform accessors - public func setValue(_ value:GLfloat, forUniform:String) { + public func setValue(_ value: GLfloat, forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if (currentUniformFloatValues[forUniform] != value) { + if currentUniformFloatValues[forUniform] != value { glUniform1f(GLint(uniformAddress), value) currentUniformFloatValues[forUniform] = value } } - public func setValue(_ value:GLint, forUniform:String) { + public func setValue(_ value: GLint, forUniform: String) { guard let 
uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if (currentUniformIntValues[forUniform] != value) { + if currentUniformIntValues[forUniform] != value { glUniform1i(GLint(uniformAddress), value) currentUniformIntValues[forUniform] = value } } - public func setValue(_ value:Color, forUniform:String) { + public func setValue(_ value: Color, forUniform: String) { if colorUniformsUseFourComponents { - self.setValue(value.toGLArrayWithAlpha(), forUniform:forUniform) + self.setValue(value.toGLArrayWithAlpha(), forUniform: forUniform) } else { - self.setValue(value.toGLArray(), forUniform:forUniform) + self.setValue(value.toGLArray(), forUniform: forUniform) } } - public func setValue(_ value:[GLfloat], forUniform:String) { + public func setValue(_ value: [GLfloat], forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value{ + if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value { } else { - if (value.count == 2) { + if value.count == 2 { glUniform2fv(uniformAddress, 1, value) - } else if (value.count == 3) { + } else if value.count == 3 { glUniform3fv(uniformAddress, 1, value) - } else if (value.count == 4) { + } else if value.count == 4 { glUniform4fv(uniformAddress, 1, value) } else { fatalError("Tried to set a float array uniform outside of the range of values") @@ -166,16 +165,16 @@ public class ShaderProgram { } } - public func setMatrix(_ value:[GLfloat], forUniform:String) { + public func setMatrix(_ value: [GLfloat], forUniform: String) { guard let uniformAddress = uniformIndex(forUniform) else { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or 
optimized out by the compiler") return } - if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value{ + if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value { } else { - if (value.count == 9) { + if value.count == 9 { glUniformMatrix3fv(uniformAddress, 1, GLboolean(GL_FALSE), value) - } else if (value.count == 16) { + } else if value.count == 16 { glUniformMatrix4fv(uniformAddress, 1, GLboolean(GL_FALSE), value) } else { fatalError("Tried to set a matrix uniform outside of the range of supported sizes (3x3, 4x4)") @@ -190,19 +189,19 @@ public class ShaderProgram { func link() throws { glLinkProgram(program) - var linkStatus:GLint = 0 + var linkStatus: GLint = 0 glGetProgramiv(program, GLenum(GL_LINK_STATUS), &linkStatus) - if (linkStatus == 0) { - var logLength:GLint = 0 + if linkStatus == 0 { + var logLength: GLint = 0 glGetProgramiv(program, GLenum(GL_INFO_LOG_LENGTH), &logLength) - if (logLength > 0) { - var compileLog = [CChar](repeating:0, count:Int(logLength)) + if logLength > 0 { + var compileLog = [CChar](repeating: 0, count: Int(logLength)) glGetProgramInfoLog(program, logLength, &logLength, &compileLog) - print("Link log: \(String(cString:compileLog))") + print("Link log: \(String(cString: compileLog))") } - throw ShaderCompileError(compileLog:"Link error") + throw ShaderCompileError(compileLog: "Link error") } } @@ -211,34 +210,34 @@ public class ShaderProgram { } } -func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint { - let shaderHandle:GLuint +func compileShader(_ shaderString: String, type: ShaderType) throws -> GLuint { + let shaderHandle: GLuint switch type { case .vertex: shaderHandle = glCreateShader(GLenum(GL_VERTEX_SHADER)) case .fragment: shaderHandle = glCreateShader(GLenum(GL_FRAGMENT_SHADER)) } - shaderString.withGLChar{glString in - var tempString:UnsafePointer? = glString + shaderString.withGLChar {glString in + var tempString: UnsafePointer? 
= glString glShaderSource(shaderHandle, 1, &tempString, nil) glCompileShader(shaderHandle) } - var compileStatus:GLint = 1 + var compileStatus: GLint = 1 glGetShaderiv(shaderHandle, GLenum(GL_COMPILE_STATUS), &compileStatus) - if (compileStatus != 1) { - var logLength:GLint = 0 + if compileStatus != 1 { + var logLength: GLint = 0 glGetShaderiv(shaderHandle, GLenum(GL_INFO_LOG_LENGTH), &logLength) - if (logLength > 0) { - var compileLog = [CChar](repeating:0, count:Int(logLength)) + if logLength > 0 { + var compileLog = [CChar](repeating: 0, count: Int(logLength)) glGetShaderInfoLog(shaderHandle, logLength, &logLength, &compileLog) - print("Compile log: \(String(cString:compileLog))") + print("Compile log: \(String(cString: compileLog))") // let compileLogString = String(bytes:compileLog.map{UInt8($0)}, encoding:NSASCIIStringEncoding) switch type { - case .vertex: throw ShaderCompileError(compileLog:"Vertex shader compile error:") - case .fragment: throw ShaderCompileError(compileLog:"Fragment shader compile error:") + case .vertex: throw ShaderCompileError(compileLog: "Vertex shader compile error:") + case .fragment: throw ShaderCompileError(compileLog: "Fragment shader compile error:") } } } @@ -246,7 +245,7 @@ func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint { return shaderHandle } -public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() throws -> T) -> T { +public func crashOnShaderCompileFailure(_ shaderName: String, _ operation:() throws -> T) -> T { do { return try operation() } catch { @@ -255,12 +254,12 @@ public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() t } } -public func shaderFromFile(_ file:URL) throws -> String { +public func shaderFromFile(_ file: URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) - guard 
(FileManager.default.fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + guard FileManager.default.fileExists(atPath: file.path) else { throw ShaderCompileError(compileLog: "Shader file \(file) missing") } - let fragmentShaderString = try NSString(contentsOfFile:file.path, encoding:String.Encoding.ascii.rawValue) + let fragmentShaderString = try NSString(contentsOfFile: file.path, encoding: String.Encoding.ascii.rawValue) - return String(describing:fragmentShaderString) + return String(describing: fragmentShaderString) } diff --git a/framework/Source/ShaderUniformSettings.swift b/framework/Source/ShaderUniformSettings.swift index d2835db5..91b34b6b 100644 --- a/framework/Source/ShaderUniformSettings.swift +++ b/framework/Source/ShaderUniformSettings.swift @@ -14,8 +14,8 @@ public struct ShaderUniformSettings { private static var lock = os_unfair_lock_s() - private var _uniformValues = [String:Any]() - private var uniformValues: [String:Any] { + private var _uniformValues = [String: Any]() + private var uniformValues: [String: Any] { get { os_unfair_lock_lock(&Self.lock) let temp = _uniformValues @@ -32,52 +32,52 @@ public struct ShaderUniformSettings { public init() { } - public subscript(index:String) -> Float? { - get { return uniformValues[index] as? Float} + public subscript(index: String) -> Float? { + get { return uniformValues[index] as? Float } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Int? { + public subscript(index: String) -> Int? { get { return uniformValues[index] as? Int } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Color? { + public subscript(index: String) -> Color? { get { return uniformValues[index] as? Color } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Position? { + public subscript(index: String) -> Position? { get { return uniformValues[index] as? 
Position } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Size? { - get { return uniformValues[index] as? Size} + public subscript(index: String) -> Size? { + get { return uniformValues[index] as? Size } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Matrix4x4? { + public subscript(index: String) -> Matrix4x4? { get { return uniformValues[index] as? Matrix4x4 } set(newValue) { uniformValues[index] = newValue } } - public subscript(index:String) -> Matrix3x3? { - get { return uniformValues[index] as? Matrix3x3} + public subscript(index: String) -> Matrix3x3? { + get { return uniformValues[index] as? Matrix3x3 } set(newValue) { uniformValues[index] = newValue } } - public func restoreShaderSettings(_ shader:ShaderProgram) { + public func restoreShaderSettings(_ shader: ShaderProgram) { let finalUniformValues = uniformValues for (uniform, value) in finalUniformValues { switch value { - case let value as Float: shader.setValue(GLfloat(value), forUniform:uniform) - case let value as Int: shader.setValue(GLint(value), forUniform:uniform) - case let value as Color: shader.setValue(value, forUniform:uniform) - case let value as Position: shader.setValue(value.toGLArray(), forUniform:uniform) - case let value as Size: shader.setValue(value.toGLArray(), forUniform:uniform) - case let value as Matrix4x4: shader.setMatrix(value.toRowMajorGLArray(), forUniform:uniform) - case let value as Matrix3x3: shader.setMatrix(value.toRowMajorGLArray(), forUniform:uniform) + case let value as Float: shader.setValue(GLfloat(value), forUniform: uniform) + case let value as Int: shader.setValue(GLint(value), forUniform: uniform) + case let value as Color: shader.setValue(value, forUniform: uniform) + case let value as Position: shader.setValue(value.toGLArray(), forUniform: uniform) + case let value as Size: shader.setValue(value.toGLArray(), forUniform: uniform) + case let value as Matrix4x4: 
shader.setMatrix(value.toRowMajorGLArray(), forUniform: uniform) + case let value as Matrix3x3: shader.setMatrix(value.toRowMajorGLArray(), forUniform: uniform) default: fatalError("Somehow tried to restore a shader uniform value of an unsupported type: \(value)") } } diff --git a/framework/Source/Size.swift b/framework/Source/Size.swift index 4e55e925..5bee3434 100644 --- a/framework/Source/Size.swift +++ b/framework/Source/Size.swift @@ -1,8 +1,8 @@ public struct Size { - public let width:Float - public let height:Float + public let width: Float + public let height: Float - public init(width:Float, height:Float) { + public init(width: Float, height: Float) { self.width = width self.height = height } diff --git a/framework/Source/TextureInput.swift b/framework/Source/TextureInput.swift index b2a782bf..da83022a 100644 --- a/framework/Source/TextureInput.swift +++ b/framework/Source/TextureInput.swift @@ -15,11 +15,11 @@ public class TextureInput: ImageSource { public let targets = TargetContainer() - let textureFramebuffer:Framebuffer + let textureFramebuffer: Framebuffer - public init(texture:GLuint, size:Size, orientation:ImageOrientation = .portrait) { + public init(texture: GLuint, size: Size, orientation: ImageOrientation = .portrait) { do { - textureFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(size), textureOnly:true, overriddenTexture:texture) + textureFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: orientation, size: GLSize(size), textureOnly: true, overriddenTexture: texture) } catch { fatalError("Could not create framebuffer for custom input texture.") } @@ -29,8 +29,8 @@ public class TextureInput: ImageSource { updateTargetsWithFramebuffer(textureFramebuffer) } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { textureFramebuffer.lock() - 
target.newFramebufferAvailable(textureFramebuffer, fromSourceIndex:atIndex) + target.newFramebufferAvailable(textureFramebuffer, fromSourceIndex: atIndex) } } diff --git a/framework/Source/TextureOutput.swift b/framework/Source/TextureOutput.swift index 072e07c6..7523c298 100644 --- a/framework/Source/TextureOutput.swift +++ b/framework/Source/TextureOutput.swift @@ -13,12 +13,12 @@ #endif public class TextureOutput: ImageConsumer { - public var newTextureAvailableCallback:((GLuint) -> ())? + public var newTextureAvailableCallback: ((GLuint) -> Void)? public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { newTextureAvailableCallback?(framebuffer.texture) // TODO: Maybe extend the lifetime of the texture past this if needed framebuffer.unlock() diff --git a/framework/Source/TextureSamplingOperation.swift b/framework/Source/TextureSamplingOperation.swift index 60fc3451..fca0b0ae 100644 --- a/framework/Source/TextureSamplingOperation.swift +++ b/framework/Source/TextureSamplingOperation.swift @@ -1,13 +1,13 @@ open class TextureSamplingOperation: BasicOperation { - public var overriddenTexelSize:Size? + public var overriddenTexelSize: Size? 
- public init(vertexShader:String = NearbyTexelSamplingVertexShader, fragmentShader:String, numberOfInputs:UInt = 1) { - super.init(vertexShader:vertexShader, fragmentShader:fragmentShader, numberOfInputs:numberOfInputs) + public init(vertexShader: String = NearbyTexelSamplingVertexShader, fragmentShader: String, numberOfInputs: UInt = 1) { + super.init(vertexShader: vertexShader, fragmentShader: fragmentShader, numberOfInputs: numberOfInputs) } - override open func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + override open func configureFramebufferSpecificUniforms(_ inputFramebuffer: Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) + let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } diff --git a/framework/Source/Timestamp.swift b/framework/Source/Timestamp.swift index 4455a898..81b84669 100644 --- a/framework/Source/Timestamp.swift +++ b/framework/Source/Timestamp.swift @@ -2,8 +2,8 @@ import Foundation // This reimplements CMTime such that it can reach across to Linux public struct TimestampFlags: OptionSet { - public let rawValue:UInt32 - public init(rawValue:UInt32) { self.rawValue = rawValue } + public let rawValue: UInt32 + public init(rawValue: UInt32) { self.rawValue = rawValue } public static let valid = TimestampFlags(rawValue: 1 << 0) public static let hasBeenRounded = TimestampFlags(rawValue: 1 << 1) @@ -13,12 +13,12 @@ public struct TimestampFlags: OptionSet { } public struct Timestamp: Comparable { - let value:Int64 - let timescale:Int32 - let flags:TimestampFlags - let epoch:Int64 + let value: Int64 + let timescale: Int32 + let flags: TimestampFlags + let epoch: Int64 - public init(value:Int64, timescale:Int32, 
flags:TimestampFlags, epoch:Int64) { + public init(value: Int64, timescale: Int32, flags: TimestampFlags, epoch: Int64) { self.value = value self.timescale = timescale self.flags = flags @@ -30,7 +30,7 @@ public struct Timestamp: Comparable { } } -public func ==(x:Timestamp, y:Timestamp) -> Bool { +public func ==(x: Timestamp, y: Timestamp) -> Bool { // TODO: Fix this // if (x.flags.contains(TimestampFlags.PositiveInfinity) && y.flags.contains(TimestampFlags.PositiveInfinity)) { // return true @@ -40,8 +40,8 @@ public func ==(x:Timestamp, y:Timestamp) -> Bool { // return false // } - let correctedYValue:Int64 - if (x.timescale != y.timescale) { + let correctedYValue: Int64 + if x.timescale != y.timescale { correctedYValue = Int64(round(Double(y.value) * Double(x.timescale) / Double(y.timescale))) } else { correctedYValue = y.value @@ -50,7 +50,7 @@ public func ==(x:Timestamp, y:Timestamp) -> Bool { return ((x.value == correctedYValue) && (x.epoch == y.epoch)) } -public func <(x:Timestamp, y:Timestamp) -> Bool { +public func <(x: Timestamp, y: Timestamp) -> Bool { // TODO: Fix this // if (x.flags.contains(TimestampFlags.PositiveInfinity) || y.flags.contains(TimestampFlags.NegativeInfinity)) { // return false @@ -58,14 +58,14 @@ public func <(x:Timestamp, y:Timestamp) -> Bool { // return true // } - if (x.epoch < y.epoch) { + if x.epoch < y.epoch { return true - } else if (x.epoch > y.epoch) { + } else if x.epoch > y.epoch { return false } - let correctedYValue:Int64 - if (x.timescale != y.timescale) { + let correctedYValue: Int64 + if x.timescale != y.timescale { correctedYValue = Int64(round(Double(y.value) * Double(x.timescale) / Double(y.timescale))) } else { correctedYValue = y.value diff --git a/framework/Source/TwoStageOperation.swift b/framework/Source/TwoStageOperation.swift index 1afd416c..a5b22735 100644 --- a/framework/Source/TwoStageOperation.swift +++ b/framework/Source/TwoStageOperation.swift @@ -1,24 +1,24 @@ open class TwoStageOperation: 
BasicOperation { - public var overrideDownsamplingOptimization:Bool = false + public var overrideDownsamplingOptimization: Bool = false // override var outputFramebuffer:Framebuffer { get { return Framebuffer } } - var downsamplingFactor:Float? + var downsamplingFactor: Float? - override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + override func internalRenderFunction(_ inputFramebuffer: Framebuffer, textureProperties: [InputTextureProperties]) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) // Downsample - let internalStageSize:GLSize - let firstStageTextureProperties:[InputTextureProperties] - let downsamplingFramebuffer:Framebuffer? + let internalStageSize: GLSize + let firstStageTextureProperties: [InputTextureProperties] + let downsamplingFramebuffer: Framebuffer? if let downsamplingFactor = downsamplingFactor { - internalStageSize = GLSize(Size(width:max(5.0, Float(renderFramebuffer.size.width) / downsamplingFactor), height:max(5.0, Float(renderFramebuffer.size.height) / downsamplingFactor))) - downsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) + internalStageSize = GLSize(Size(width: max(5.0, Float(renderFramebuffer.size.width) / downsamplingFactor), height: max(5.0, Float(renderFramebuffer.size.height) / downsamplingFactor))) + downsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: internalStageSize, stencil: false) downsamplingFramebuffer!.lock() downsamplingFramebuffer!.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, 
inputTextures:textureProperties) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: nil, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: textureProperties) releaseIncomingFramebuffers() firstStageTextureProperties = [downsamplingFramebuffer!.texturePropertiesForOutputRotation(.noRotation)] @@ -29,42 +29,42 @@ open class TwoStageOperation: BasicOperation { } // Render first stage - let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) + let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: internalStageSize, stencil: false) firstStageFramebuffer.lock() firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - let texelSize = inputFramebuffer.initialStageTexelSize(for:outputRotation) + let texelSize = inputFramebuffer.initialStageTexelSize(for: outputRotation) uniformSettings["texelWidth"] = texelSize.width * (downsamplingFactor ?? 1.0) uniformSettings["texelHeight"] = texelSize.height * (downsamplingFactor ?? 1.0) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:firstStageTextureProperties) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: firstStageTextureProperties) if let downsamplingFramebuffer = downsamplingFramebuffer { downsamplingFramebuffer.unlock() } else { releaseIncomingFramebuffers() } - let secondStageTexelSize = renderFramebuffer.texelSize(for:.noRotation) + let secondStageTexelSize = renderFramebuffer.texelSize(for: .noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width * (downsamplingFactor ?? 
1.0) uniformSettings["texelHeight"] = 0.0 // Render second stage and upsample - if (downsamplingFactor != nil) { - let beforeUpsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) + if downsamplingFactor != nil { + let beforeUpsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: internalStageSize, stencil: false) beforeUpsamplingFramebuffer.activateFramebufferForRendering() beforeUpsamplingFramebuffer.lock() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[beforeUpsamplingFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: nil, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [beforeUpsamplingFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) beforeUpsamplingFramebuffer.unlock() } else { renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) + 
renderQuadWithShader(shader, uniformSettings: uniformSettings, vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() } } diff --git a/framework/Source/iOS/CILookupFilter.swift b/framework/Source/iOS/CILookupFilter.swift index 3924b472..a8a45b86 100644 --- a/framework/Source/iOS/CILookupFilter.swift +++ b/framework/Source/iOS/CILookupFilter.swift @@ -64,7 +64,7 @@ public class CILookupFilter { public extension CIFilter { static func filter(with lutUIImage: UIImage) -> CIFilter? { guard let lutCGImage = lutUIImage.cgImage else { - print("ERROR: Invalid colorLUT"); + print("ERROR: Invalid colorLUT") return nil } let size = 64 @@ -74,11 +74,11 @@ public extension CIFilter { let columnCount = lutWidth / size guard lutWidth % size == 0 && lutHeight % size == 0 && rowCount * columnCount == size else { - print("ERROR: Invalid colorLUT image size, width:\(lutWidth) height:\(lutHeight)"); + print("ERROR: Invalid colorLUT image size, width:\(lutWidth) height:\(lutHeight)") return nil } - guard let bitmap = getBytesFromImage(image: lutUIImage) else { + guard let bitmap = getBytesFromImage(image: lutUIImage) else { print("ERROR: Cannot get byte from image") return nil } @@ -94,9 +94,9 @@ public extension CIFilter { for _ in 0 ..< columnCount { for x in 0 ..< size { let alpha = Float(bitmap[bitmapOffset]) / 255.0 - let red = Float(bitmap[bitmapOffset+1]) / 255.0 - let green = Float(bitmap[bitmapOffset+2]) / 255.0 - let blue = Float(bitmap[bitmapOffset+3]) / 255.0 + let red = Float(bitmap[bitmapOffset + 1]) / 255.0 + let green = Float(bitmap[bitmapOffset + 2]) / 255.0 + let blue = Float(bitmap[bitmapOffset + 3]) / 255.0 let dataOffset = (z * size * size + y * size + x) * 4 diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 9ff5ea1a..0eaaf363 100755 --- a/framework/Source/iOS/Camera.swift +++ 
b/framework/Source/iOS/Camera.swift @@ -60,15 +60,15 @@ struct CameraError: Error { let initialBenchmarkFramesToIgnore = 5 public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - public var location:PhysicalCameraLocation { + public var location: PhysicalCameraLocation { didSet { if oldValue == location { return } configureDeviceInput(location: location, deviceType: deviceType) } } - public var runBenchmark:Bool = false - public var logFPS:Bool = false - public var audioEncodingTarget:AudioEncodingTarget? { + public var runBenchmark: Bool = false + public var logFPS: Bool = false + public var audioEncodingTarget: AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { return @@ -86,14 +86,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public let targets = TargetContainer() public weak var delegate: CameraDelegate? - public let captureSession:AVCaptureSession + public let captureSession: AVCaptureSession public var outputBufferSize: GLSize? - public var inputCamera:AVCaptureDevice! - public private(set) var videoInput:AVCaptureDeviceInput! - public let videoOutput:AVCaptureVideoDataOutput! - public var microphone:AVCaptureDevice? - public var audioInput:AVCaptureDeviceInput? - public var audioOutput:AVCaptureAudioDataOutput? + public var inputCamera: AVCaptureDevice! + public private(set) var videoInput: AVCaptureDeviceInput! + public let videoOutput: AVCaptureVideoDataOutput! + public var microphone: AVCaptureDevice? + public var audioInput: AVCaptureDeviceInput? + public var audioOutput: AVCaptureAudioDataOutput? 
public var dontDropFrames: Bool = false public var deviceType: AVCaptureDevice.DeviceType { return inputCamera.deviceType @@ -113,23 +113,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - var supportsFullYUVRange:Bool = false - let captureAsYUV:Bool - let yuvConversionShader:ShaderProgram? - let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", qos: .default) - let audioProcessingQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.audioProcessingQueue", qos: .default) + var supportsFullYUVRange: Bool = false + let captureAsYUV: Bool + let yuvConversionShader: ShaderProgram? + let frameRenderingSemaphore = DispatchSemaphore(value: 1) + let cameraProcessingQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", qos: .default) + let audioProcessingQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.audioProcessingQueue", qos: .default) let framesToIgnore = 5 var numberOfFramesCaptured = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: Double = 0.0 var framesSinceLastCheck = 0 var lastCheckTime = CACurrentMediaTime() var captureSessionRestartAttempts = 0 - public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? = nil, deviceType: AVCaptureDevice.DeviceType = .builtInWideAngleCamera) throws { - + public init(sessionPreset: AVCaptureSession.Preset, cameraDevice: AVCaptureDevice? = nil, location: PhysicalCameraLocation = .backFacing, captureAsYUV: Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? 
= nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? = nil, deviceType: AVCaptureDevice.DeviceType = .builtInWideAngleCamera) throws { debugPrint("camera init") self.location = location @@ -156,7 +155,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.automaticallyConfiguresCaptureDeviceForWideColor = false do { - self.videoInput = try AVCaptureDeviceInput(device:inputCamera) + self.videoInput = try AVCaptureDeviceInput(device: inputCamera) } catch { self.videoInput = nil self.videoOutput = nil @@ -164,7 +163,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() throw error } - if (captureSession.canAddInput(videoInput)) { + if captureSession.canAddInput(videoInput) { captureSession.addInput(videoInput) } @@ -183,25 +182,25 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } #endif - if (supportsFullYUVRange) { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + if supportsFullYUVRange { + yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] } else { - yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] + 
yuvConversionShader = crashOnShaderCompileFailure("Camera") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionVideoRangeFragmentShader) } + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA] } - if (captureSession.canAddOutput(videoOutput)) { + if captureSession.canAddOutput(videoOutput) { captureSession.addOutput(videoOutput) } if let photoOutput = photoOutput { self.photoOutput = photoOutput - if (captureSession.canAddOutput(photoOutput)) { + if captureSession.canAddOutput(photoOutput) { captureSession.addOutput(photoOutput) } } @@ -224,7 +223,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer super.init() - videoOutput.setSampleBufferDelegate(self, queue:cameraProcessingQueue) + videoOutput.setSampleBufferDelegate(self, queue: cameraProcessingQueue) NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionRuntimeError(note:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: nil) NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionDidStartRunning(note:)), name: NSNotification.Name.AVCaptureSessionDidStartRunning, object: nil) @@ -245,7 +244,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer func configureStabilization() { let stableMode = (location == .backFacing ? 
backCameraStableMode : frontCameraStableMode) - Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode:stableMode) + Camera.updateVideoOutput(location: location, videoOutput: videoOutput, stableMode: stableMode) } public func configureDeviceInput(location: PhysicalCameraLocation, deviceType: AVCaptureDevice.DeviceType, skipConfiguration: Bool = false) { @@ -284,22 +283,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let captureSession = self.captureSession DispatchQueue.global().async { - if (captureSession.isRunning) { + if captureSession.isRunning { // Don't call this on the sharedImageProcessingContext otherwise you may get a deadlock // since this waits for the captureOutput() delegate call to finish. captureSession.stopRunning() } } - sharedImageProcessingContext.runOperationSynchronously{ - self.videoOutput?.setSampleBufferDelegate(nil, queue:nil) - self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) + sharedImageProcessingContext.runOperationSynchronously { + self.videoOutput?.setSampleBufferDelegate(nil, queue: nil) + self.audioOutput?.setSampleBufferDelegate(nil, queue: nil) } } @objc func captureSessionRuntimeError(note: NSNotification) { print("ERROR: Capture session runtime error: \(String(describing: note.userInfo))") - if(self.captureSessionRestartAttempts < 1) { + if self.captureSessionRestartAttempts < 1 { DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { self.startCapture() } @@ -312,7 +311,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - guard (captureOutput != audioOutput) else { + guard captureOutput != audioOutput else { self.processAudioSampleBuffer(sampleBuffer) return } @@ -321,9 +320,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let notFrameDrop = 
dontDropFrames - guard notFrameDrop || (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } + guard notFrameDrop || (frameRenderingSemaphore.wait(timeout: DispatchTime.now()) == DispatchTimeoutResult.success) else { return } - sharedImageProcessingContext.runOperationAsynchronously{ + sharedImageProcessingContext.runOperationAsynchronously { defer { if !notFrameDrop { self.frameRenderingSemaphore.signal() @@ -337,41 +336,41 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let bufferWidth = CVPixelBufferGetWidth(cameraFrame) let bufferHeight = CVPixelBufferGetHeight(cameraFrame) let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - let cameraFramebuffer:Framebuffer + CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + let cameraFramebuffer: Framebuffer self.delegate?.didCaptureBuffer(sampleBuffer) if self.captureAsYUV { - let luminanceFramebuffer:Framebuffer - let chrominanceFramebuffer:Framebuffer + let luminanceFramebuffer: Framebuffer + let chrominanceFramebuffer: Framebuffer if sharedImageProcessingContext.supportsTextureCaches() { - var luminanceTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceTextureRef) + var luminanceTextureRef: CVOpenGLESTexture? 
+ _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceTextureRef) let luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef!) glActiveTexture(GLenum(GL_TEXTURE4)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE) - luminanceFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true, overriddenTexture:luminanceTexture) + luminanceFramebuffer = try! Framebuffer(context: sharedImageProcessingContext, orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) - var chrominanceTextureRef:CVOpenGLESTexture? = nil - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceTextureRef) + var chrominanceTextureRef: CVOpenGLESTexture? + _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceTextureRef) let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef!) 
glActiveTexture(GLenum(GL_TEXTURE5)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE) glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE) - chrominanceFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth / 2), height:GLint(bufferHeight / 2)), textureOnly:true, overriddenTexture:chrominanceTexture) + chrominanceFramebuffer = try! Framebuffer(context: sharedImageProcessingContext, orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth / 2), height: GLint(bufferHeight / 2)), textureOnly: true, overriddenTexture: chrominanceTexture) } else { glActiveTexture(GLenum(GL_TEXTURE4)) - luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) luminanceFramebuffer.lock() glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 0)) glActiveTexture(GLenum(GL_TEXTURE5)) - chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth / 2), height:GLint(bufferHeight / 2)), textureOnly:true) + chrominanceFramebuffer = 
sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth / 2), height: GLint(bufferHeight / 2)), textureOnly: true) chrominanceFramebuffer.lock() glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 1)) @@ -380,21 +379,21 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let inputSize = luminanceFramebuffer.sizeForTargetOrientation(.portrait).gpuSize let outputSize = self.outputBufferSize ?? luminanceFramebuffer.sizeForTargetOrientation(.portrait) let resizeOutput = limitedSizeAndRatio(of: inputSize, to: outputSize.gpuSize) - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(resizeOutput.finalCropSize), textureOnly:false) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(resizeOutput.finalCropSize), textureOnly: false) - let conversionMatrix:Matrix3x3 - if (self.supportsFullYUVRange) { + let conversionMatrix: Matrix3x3 + if self.supportsFullYUVRange { conversionMatrix = colorConversionMatrix601FullRangeDefault } else { conversionMatrix = colorConversionMatrix601Default } - convertYUVToRGB(shader:self.yuvConversionShader!, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resizeOutput: resizeOutput, resultFramebuffer:cameraFramebuffer, colorConversionMatrix:conversionMatrix) + convertYUVToRGB(shader: self.yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resizeOutput: resizeOutput, resultFramebuffer: cameraFramebuffer, 
colorConversionMatrix: conversionMatrix) } else { - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: self.location.imageOrientation(), size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true) glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame)) } - CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) @@ -406,7 +405,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if self.runBenchmark { self.numberOfFramesCaptured += 1 - if (self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore) { + if self.numberOfFramesCaptured > initialBenchmarkFramesToIgnore { let currentFrameTime = (CACurrentMediaTime() - startTime) self.totalFrameTimeDuringCapture += currentFrameTime print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured - initialBenchmarkFramesToIgnore)) ms") @@ -415,7 +414,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } if self.logFPS { - if ((CACurrentMediaTime() - self.lastCheckTime) > 1.0) { + if (CACurrentMediaTime() - self.lastCheckTime) > 1.0 { self.lastCheckTime = CACurrentMediaTime() print("FPS: \(self.framesSinceLastCheck)") self.framesSinceLastCheck = 0 
@@ -434,18 +433,18 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.numberOfFramesCaptured = 0 self.totalFrameTimeDuringCapture = 0 - if (!captureSession.isRunning) { + if !captureSession.isRunning { captureSession.startRunning() } } public func stopCapture() { - if (captureSession.isRunning) { + if captureSession.isRunning { captureSession.stopRunning() } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for camera inputs } @@ -453,7 +452,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer // MARK: Audio processing public func addAudioInputsAndOutputs() throws { - guard (audioOutput == nil) else { return } + guard audioOutput == nil else { return } captureSession.beginConfiguration() defer { @@ -461,7 +460,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } microphone = AVCaptureDevice.default(for: .audio) guard let microphone = microphone else { return } - audioInput = try AVCaptureDeviceInput(device:microphone) + audioInput = try AVCaptureDeviceInput(device: microphone) guard let audioInput = audioInput else { return } if captureSession.canAddInput(audioInput) { captureSession.addInput(audioInput) @@ -470,12 +469,12 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureSession.canAddOutput(output) { captureSession.addOutput(output) } - output.setSampleBufferDelegate(self, queue:audioProcessingQueue) + output.setSampleBufferDelegate(self, queue: audioProcessingQueue) audioOutput = output } public func removeAudioInputsAndOutputs() { - guard (audioOutput != nil) else { return } + guard audioOutput != nil else { return } captureSession.beginConfiguration() captureSession.removeInput(audioInput!) 
@@ -486,7 +485,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.commitConfiguration() } - func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { + func processAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) { self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) } } diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift index f8fe4bd4..ee74a180 100644 --- a/framework/Source/iOS/FramebufferGenerator.swift +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -14,7 +14,6 @@ public class FramebufferGenerator { private var renderFramebuffer: Framebuffer? public init() { - } public func generateFromYUVBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { @@ -72,8 +71,8 @@ private extension FramebufferGenerator { let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture) - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) let luminanceFramebuffer: Framebuffer do { @@ -99,14 +98,14 @@ private extension FramebufferGenerator { let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) 
glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture) - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)); - glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)); + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) let chrominanceFramebuffer: Framebuffer do { chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: originalOrientation, - size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), + size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture) } catch { @@ -155,19 +154,18 @@ private extension FramebufferGenerator { let bufferSize = framebuffer.size var cachedTextureRef: CVOpenGLESTexture? - let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
- renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: bufferSize, textureOnly: false, overriddenTexture: cachedTexture) renderFramebuffer?.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) glFinish() - } - catch { + } catch { print("WARNING: Trouble appending pixel buffer at time: \(framebuffer.timingStyle.timestamp?.seconds() ?? 0) \(error)") } @@ -176,7 +174,7 @@ private extension FramebufferGenerator { } func _createPixelBufferPool(_ width: Int32, _ height: Int32, _ pixelFormat: FourCharCode, _ maxBufferCount: Int32) -> CVPixelBufferPool? { - var outputPool: CVPixelBufferPool? = nil + var outputPool: CVPixelBufferPool? let sourcePixelBufferOptions: NSDictionary = [kCVPixelBufferPixelFormatTypeKey: pixelFormat, kCVPixelBufferWidthKey: width, diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index 3ec2851e..fa2ebe6e 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -76,14 +76,14 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { fps: Double, size: Size, needAlignAV: Bool, - fileType:AVFileType = .mov, - liveVideo:Bool = false, - videoSettings:[String:Any]? = nil, - videoNaturalTimeScale:CMTimeScale? 
= nil, + fileType: AVFileType = .mov, + liveVideo: Bool = false, + videoSettings: [String: Any]? = nil, + videoNaturalTimeScale: CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, - audioSettings:[String:Any]? = nil, - audioSourceFormatHint:CMFormatDescription? = nil, + audioSettings: [String: Any]? = nil, + audioSourceFormatHint: CMFormatDescription? = nil, _ configure: ((MovieOutput) -> Void)? = nil) { MovieOutput.movieProcessingContext.runOperationAsynchronously { [weak self] in self?._setMovieOutputIfNotReady(url: url, @@ -146,7 +146,7 @@ extension MovieCache { _writeAudioSampleBuffers(shouldInvalidateSampleWhenDone) } - public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { guard shouldProcessBuffer else { return } _cacheVideoSampleBuffer(sampleBuffer) _writeVideoSampleBuffers(shouldInvalidateSampleWhenDone) @@ -202,11 +202,11 @@ private extension MovieCache { needAlignAV: Bool, fileType: AVFileType = .mov, liveVideo: Bool = false, - videoSettings: [String:Any]? = nil, + videoSettings: [String: Any]? = nil, videoNaturalTimeScale: CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, - audioSettings: [String:Any]? = nil, + audioSettings: [String: Any]? = nil, audioSourceFormatHint: CMFormatDescription? = nil, _ configure: ((MovieOutput) -> Void)? = nil) { guard !isReadyToWrite else { diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 342df1f4..f8842226 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -15,28 +15,28 @@ public class MovieInput: ImageSource { public weak var delegate: MovieInputDelegate? - public private(set) var audioEncodingTarget:AudioEncodingTarget? 
+ public private(set) var audioEncodingTarget: AudioEncodingTarget? - let yuvConversionShader:ShaderProgram - public let asset:AVAsset - let videoComposition:AVVideoComposition? - var playAtActualSpeed:Bool + let yuvConversionShader: ShaderProgram + public let asset: AVAsset + let videoComposition: AVVideoComposition? + var playAtActualSpeed: Bool // Time in the video where it should start. It will be reset when looping. - var requestedStartTime:CMTime? + var requestedStartTime: CMTime? // Time in the video where it should start for trimmed start. - var trimmedStartTime:CMTime? + var trimmedStartTime: CMTime? // Time in the video where it started. - var startTime:CMTime? + var startTime: CMTime? // Duration of the video from startTime for trimming. - var trimmedDuration:CMTime? + var trimmedDuration: CMTime? // Time according to device clock when the video started. - var actualStartTime:DispatchTime? + var actualStartTime: DispatchTime? // Last sample time that played. - private(set) public var currentTime:CMTime? + private(set) public var currentTime: CMTime? - public var loop:Bool - public var playrate:Double + public var loop: Bool + public var playrate: Double // Called after the video finishes. Not called when cancel() or pause() is called. public var completion: ((Error?) -> Void)? @@ -45,7 +45,7 @@ public class MovieInput: ImageSource { // Can be used to check video encoding progress. Not called from main thread. public var progress: ((Double) -> Void)? - public var synchronizedMovieOutput:MovieOutput? { + public var synchronizedMovieOutput: MovieOutput? { didSet { self.enableSynchronizedEncoding() } @@ -57,8 +57,8 @@ public class MovieInput: ImageSource { } let conditionLock = NSCondition() var readingShouldWait = false - var videoInputStatusObserver:NSKeyValueObservation? - var audioInputStatusObserver:NSKeyValueObservation? + var videoInputStatusObserver: NSKeyValueObservation? + var audioInputStatusObserver: NSKeyValueObservation? let maxFPS: Float? 
lazy var framebufferGenerator = FramebufferGenerator() @@ -71,18 +71,18 @@ public class MovieInput: ImageSource { } } var timebaseInfo = mach_timebase_info_data_t() - var currentThread:Thread? + var currentThread: Thread? var totalFramesSent = 0 - var totalFrameTimeDuringCapture:Double = 0.0 + var totalFrameTimeDuringCapture: Double = 0.0 - var audioSettings:[String:Any]? + var audioSettings: [String: Any]? - var movieFramebuffer:Framebuffer? - public var framebufferUserInfo:[AnyHashable:Any]? + var movieFramebuffer: Framebuffer? + public var framebufferUserInfo: [AnyHashable: Any]? // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, playrate:Double = 1.0, audioSettings:[String:Any]? = nil, maxFPS: Float? = nil) throws { + public init(asset: AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = false, loop: Bool = false, playrate: Double = 1.0, audioSettings: [String: Any]? = nil, maxFPS: Float? = nil) throws { debugPrint("movie input init \(asset)") self.asset = asset @@ -90,15 +90,15 @@ public class MovieInput: ImageSource { self.playAtActualSpeed = playAtActualSpeed self.loop = loop self.playrate = playrate - self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} + self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) } self.audioSettings = audioSettings self.maxFPS = maxFPS } - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false, playrate: Double = 1.0, audioSettings:[String:Any]? 
= nil) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] - let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop, playrate:playrate, audioSettings:audioSettings) + public convenience init(url: URL, playAtActualSpeed: Bool = false, loop: Bool = false, playrate: Double = 1.0, audioSettings: [String: Any]? = nil) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true)] + let inputAsset = AVURLAsset(url: url, options: inputOptions) + try self.init(asset: inputAsset, videoComposition: nil, playAtActualSpeed: playAtActualSpeed, loop: loop, playrate: playrate, audioSettings: audioSettings) } deinit { @@ -175,21 +175,19 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Internal processing functions - func createReader() -> AVAssetReader? - { + func createReader() -> AVAssetReader? { do { - let outputSettings:[String:AnyObject] = - [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + let outputSettings: [String: AnyObject] = + [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] let assetReader = try AVAssetReader(asset: self.asset) try NSObject.catchException { - if(self.videoComposition == nil) { - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings:outputSettings) + if self.videoComposition == nil { + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings: outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false assetReader.add(readerVideoTrackOutput) - } - else { + } else { let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: 
self.asset.tracks(withMediaType: .video), videoSettings: outputSettings) readerVideoTrackOutput.videoComposition = self.videoComposition readerVideoTrackOutput.alwaysCopiesSampleData = false @@ -235,13 +233,11 @@ public class MovieInput: ImageSource { mach_timebase_info(&timebaseInfo) - if(useRealtimeThreads) { + if useRealtimeThreads { self.configureThread() - } - else if(playAtActualSpeed) { + } else if playAtActualSpeed { thread.qualityOfService = .userInitiated - } - else { + } else { // This includes synchronized encoding since the above vars will be disabled for it. thread.qualityOfService = .default } @@ -259,47 +255,45 @@ public class MovieInput: ImageSource { return } } - } - catch { + } catch { print("ERROR: Unable to start reading: \(error)") completion?(error) return } - var readerVideoTrackOutput:AVAssetReaderOutput? = nil - var readerAudioTrackOutput:AVAssetReaderOutput? = nil + var readerVideoTrackOutput: AVAssetReaderOutput? + var readerAudioTrackOutput: AVAssetReaderOutput? for output in assetReader.outputs { - if(output.mediaType == .video) { + if output.mediaType == .video { readerVideoTrackOutput = output } - if(output.mediaType == .audio) { + if output.mediaType == .audio { readerAudioTrackOutput = output } } - while(assetReader.status == .reading) { - if(thread.isCancelled) { break } + while assetReader.status == .reading { + if thread.isCancelled { break } autoreleasepool { if let movieOutput = self.synchronizedMovieOutput { self.conditionLock.lock() - if(self.readingShouldWait) { + if self.readingShouldWait { self.synchronizedEncodingDebugPrint("Disable reading") self.conditionLock.wait() self.synchronizedEncodingDebugPrint("Enable reading") } self.conditionLock.unlock() - if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData) { + if movieOutput.assetWriterVideoInput.isReadyForMoreMediaData { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) } - if(movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? 
false) { + if movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false { if let readerAudioTrackOutput = readerAudioTrackOutput { self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } } - } - else { + } else { self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) if let readerAudioTrackOutput = readerAudioTrackOutput, self.audioEncodingTarget?.readyForNextAudioBuffer() ?? true { @@ -318,11 +312,10 @@ public class MovieInput: ImageSource { assetReader.cancelReading() // Start the video over so long as it wasn't cancelled. - if (self.loop && !thread.isCancelled) { + if self.loop && !thread.isCancelled { self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) self.currentThread?.start() - } - else { + } else { self.synchronizedEncodingDebugPrint("MovieInput finished reading") self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") self.delegate?.didFinishMovie() @@ -342,7 +335,7 @@ public class MovieInput: ImageSource { } } - func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { + func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput: AVAssetReaderOutput) { guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { MovieOutput.movieProcessingContext.runOperationAsynchronously { @@ -356,7 +349,7 @@ public class MovieInput: ImageSource { } if delegate != nil { - sharedImageProcessingContext.runOperationSynchronously{ [weak self] in + sharedImageProcessingContext.runOperationSynchronously { [weak self] in self?.delegate?.didReadVideoFrame(sampleBuffer) } } @@ -375,7 +368,7 @@ public class MovieInput: ImageSource { } } - progress?(currentSampleTime.seconds/duration.seconds) + progress?(currentSampleTime.seconds / duration.seconds) if transcodingOnly, let movieOutput = synchronizedMovieOutput { 
movieOutput.processVideoBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) @@ -400,7 +393,6 @@ public class MovieInput: ImageSource { } func processNextVideoSampleOnGLThread(_ sampleBuffer: CMSampleBuffer, currentSampleTime: CMTime) { - synchronizedEncodingDebugPrint("Process video frame input. Time:\(CMTimeGetSeconds(currentSampleTime))") if playAtActualSpeed { @@ -415,27 +407,26 @@ public class MovieInput: ImageSource { // The reason we subtract the actualStartTime from the currentActualTime is so the actual time starts at zero relative to the video start. let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds - actualStartTime!.uptimeNanoseconds) - //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") + // print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") if delay > 0 { mach_wait_until(mach_absolute_time() + nanosToAbs(UInt64(delay))) - } - else { + } else { // This only happens if we aren't given enough processing time for playback // but is necessary otherwise the playback will never catch up to its timeline. // If we weren't adhearing to the sample timline and used the old timing method // the video would still lag during an event like this. 
- //print("Dropping frame in order to catch up") + // print("Dropping frame in order to catch up") return } } sharedImageProcessingContext.runOperationSynchronously { - self.process(movieFrame:sampleBuffer) + self.process(movieFrame: sampleBuffer) } } - func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { + func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput: AVAssetReaderOutput) { let shouldInvalidate = !transcodingOnly guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { if let movieOutput = self.synchronizedMovieOutput { @@ -460,14 +451,14 @@ public class MovieInput: ImageSource { } } - func process(movieFrame frame:CMSampleBuffer) { + func process(movieFrame frame: CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! - self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) + self.process(movieFrame: movieFrame, withSampleTime: currentSampleTime) } - func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) { let startTime = CACurrentMediaTime() guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) else { @@ -478,7 +469,7 @@ public class MovieInput: ImageSource { self.movieFramebuffer = framebuffer self.updateTargetsWithFramebuffer(framebuffer) - if(self.runBenchmark || self.synchronizedEncodingDebug) { + if self.runBenchmark || self.synchronizedEncodingDebug { self.totalFramesSent += 1 } @@ -490,7 +481,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // Not needed for movie inputs } @@ -512,7 +503,7 @@ public class MovieInput: ImageSource { try 
audioEncodingTarget.activateAudioTrack() // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. - if(synchronizedMovieOutput != nil) { self.enableSynchronizedEncoding() } + if synchronizedMovieOutput != nil { self.enableSynchronizedEncoding() } } // MARK: - @@ -536,11 +527,11 @@ public class MovieInput: ImageSource { guard let movieOutput = self.synchronizedMovieOutput else { return } - self.videoInputStatusObserver = movieOutput.assetWriterVideoInput.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] (assetWriterVideoInput, change) in + self.videoInputStatusObserver = movieOutput.assetWriterVideoInput.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] _, _ in guard let weakSelf = self else { return } weakSelf.updateLock() } - self.audioInputStatusObserver = movieOutput.assetWriterAudioInput?.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] (assetWriterAudioInput, change) in + self.audioInputStatusObserver = movieOutput.assetWriterAudioInput?.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] _, _ in guard let weakSelf = self else { return } weakSelf.updateLock() } @@ -551,11 +542,10 @@ public class MovieInput: ImageSource { self.conditionLock.lock() // Allow reading if either input is able to accept data, prevent reading if both inputs are unable to accept data. - if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false) { + if movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? 
false { self.readingShouldWait = false self.conditionLock.signal() - } - else { + } else { self.readingShouldWait = true } self.conditionLock.unlock() @@ -580,9 +570,9 @@ public class MovieInput: ImageSource { let period = UInt32(0 * clock2abs) // According to the above scheduling chapter this constraint only appears relevant // if preemtible is set to true and the period is not 0. If this is wrong, please let me know. - let constraint = UInt32(5 * clock2abs) + let constraint = UInt32(5 * clock2abs) - //print("period: \(period) computation: \(computation) constraint: \(constraint)") + // print("period: \(period) computation: \(computation) constraint: \(constraint)") let THREAD_TIME_CONSTRAINT_POLICY_COUNT = mach_msg_type_number_t(MemoryLayout.size / MemoryLayout.size) @@ -612,6 +602,6 @@ public class MovieInput: ImageSource { } func synchronizedEncodingDebugPrint(_ string: String) { - if(synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print(string) } + if synchronizedMovieOutput != nil && synchronizedEncodingDebug { print(string) } } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 256cf949..deb4014e 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -4,7 +4,7 @@ import UIKit public protocol AudioEncodingTarget { func activateAudioTrack() throws - func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) + func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) // Note: This is not used for synchronized encoding. 
func readyForNextAudioBuffer() -> Bool } @@ -51,7 +51,7 @@ public enum MovieOutputState: String { public class MovieOutput: ImageConsumer, AudioEncodingTarget { private static let assetWriterQueue = DispatchQueue(label: "com.GPUImage2.MovieOutput.assetWriterQueue", qos: .userInitiated) public let sources = SourceContainer() - public let maximumInputs:UInt = 1 + public let maximumInputs: UInt = 1 public weak var delegate: MovieOutputDelegate? @@ -60,12 +60,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public var videoID: String? public var writerStatus: AVAssetWriter.Status { assetWriter.status } public var writerError: Error? { assetWriter.error } - private let assetWriter:AVAssetWriter - let assetWriterVideoInput:AVAssetWriterInput - var assetWriterAudioInput:AVAssetWriterInput? - private let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor + private let assetWriter: AVAssetWriter + let assetWriterVideoInput: AVAssetWriterInput + var assetWriterAudioInput: AVAssetWriterInput? + private let assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor public let size: Size - private let colorSwizzlingShader:ShaderProgram + private let colorSwizzlingShader: ShaderProgram public let needAlignAV: Bool var videoEncodingIsFinished = false var audioEncodingIsFinished = false @@ -79,7 +79,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private var previousVideoEndTime: CMTime? private var previousAudioEndTime: CMTime? - var encodingLiveVideo:Bool { + var encodingLiveVideo: Bool { didSet { assetWriterVideoInput.expectsMediaDataInRealTime = encodingLiveVideo assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo @@ -87,12 +87,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } private var ciFilter: CILookupFilter? private var cpuCIContext: CIContext? - public private(set) var pixelBuffer:CVPixelBuffer? = nil + public private(set) var pixelBuffer: CVPixelBuffer? 
public var waitUtilDataIsReadyForLiveVideo = false - public private(set) var renderFramebuffer:Framebuffer! + public private(set) var renderFramebuffer: Framebuffer! - public private(set) var audioSettings:[String:Any]? = nil - public private(set) var audioSourceFormatHint:CMFormatDescription? + public private(set) var audioSettings: [String: Any]? + public private(set) var audioSourceFormatHint: CMFormatDescription? public static let movieProcessingContext: OpenGLContext = { var context: OpenGLContext? @@ -126,8 +126,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var preferredTransform: CGAffineTransform? private var isProcessing = false - public init(URL:Foundation.URL, fps: Double, size:Size, needAlignAV: Bool = true, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { - + public init(URL: Foundation.URL, fps: Double, size: Size, needAlignAV: Bool = true, fileType: AVFileType = .mov, liveVideo: Bool = false, videoSettings: [String: Any]? = nil, videoNaturalTimeScale: CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings: [String: Any]? = nil, audioSourceFormatHint: CMFormatDescription? 
= nil) throws { print("movie output init \(URL)") self.url = URL self.fps = fps @@ -136,29 +135,29 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { if sharedImageProcessingContext.supportsTextureCaches() { self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader } else { - self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} + self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader: ColorSwizzlingFragmentShader) } } self.size = size - assetWriter = try AVAssetWriter(url:URL, fileType:fileType) + assetWriter = try AVAssetWriter(url: URL, fileType: fileType) if optimizeForNetworkUse { // NOTE: this is neccessary for streaming play support, but it will slow down finish writing speed assetWriter.shouldOptimizeForNetworkUse = true } - var localSettings:[String:Any] + var localSettings: [String: Any] if let videoSettings = videoSettings { localSettings = videoSettings } else { - localSettings = [String:Any]() + localSettings = [String: Any]() } localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? size.width localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? size.height - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecType.h264.rawValue + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecType.h264.rawValue - assetWriterVideoInput = AVAssetWriterInput(mediaType:.video, outputSettings:localSettings) + assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo // You should provide a naturalTimeScale if you have one for the current media. 
@@ -168,19 +167,18 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterVideoInput.mediaTimeScale = naturalTimeScale // This is set to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. assetWriter.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: naturalTimeScale) - } - else { + } else { assetWriter.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: 1000) } encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. - let sourcePixelBufferAttributesDictionary:[String:Any] = [kCVPixelBufferPixelFormatTypeKey as String:Int32(kCVPixelFormatType_32BGRA), - kCVPixelBufferWidthKey as String:self.size.width, - kCVPixelBufferHeightKey as String:self.size.height] + let sourcePixelBufferAttributesDictionary: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int32(kCVPixelFormatType_32BGRA), + kCVPixelBufferWidthKey as String: self.size.width, + kCVPixelBufferHeightKey as String: self.size.height] - assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) + assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) self.disablePixelBufferAttachments = disablePixelBufferAttachments @@ -190,13 +188,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func setupSoftwareLUTFilter(lutImage: UIImage, intensity: Double? = nil, brightnessFactor: Double? 
= nil, sync: Bool = true) { - let block: () -> () = { [weak self] in + let block: () -> Void = { [weak self] in if self?.cpuCIContext == nil { let colorSpace = CGColorSpaceCreateDeviceRGB() let options: [CIContextOption: AnyObject] = [ .workingColorSpace: colorSpace, - .outputColorSpace : colorSpace, - .useSoftwareRenderer : NSNumber(value: true) + .outputColorSpace: colorSpace, + .useSoftwareRenderer: NSNumber(value: true) ] self?.cpuCIContext = CIContext(options: options) } @@ -210,7 +208,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } public func cleanSoftwareFilter(sync: Bool = true) { - let block: () -> () = { [weak self] in + let block: () -> Void = { [weak self] in self?.ciFilter = nil } if sync { @@ -250,7 +248,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { success = assetWriter.startWriting() } - if(!success) { + if !success { throw MovieOutputError.startWritingError(assetWriterError: self.assetWriter.error) } @@ -304,14 +302,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.assetWriterAudioInput?.markAsFinished() self.assetWriterVideoInput.markAsFinished() - var lastFrameTime: CMTime? 
if let lastVideoFrame = self.previousVideoStartTime { if !self.needAlignAV { print("MovieOutput start endSession") lastFrameTime = lastVideoFrame self.assetWriter.endSession(atSourceTime: lastVideoFrame) - } else if let lastAudioTime = self.previousAudioEndTime, let lastVideoTime = self.previousVideoEndTime { + } else if let lastAudioTime = self.previousAudioEndTime, let lastVideoTime = self.previousVideoEndTime { let endTime = min(lastAudioTime, lastVideoTime) lastFrameTime = endTime print("MovieOutput start endSession, last audio end time is:\(lastAudioTime.seconds), last video end time is:\(lastVideoTime.seconds), end time is:\(endTime.seconds)") @@ -366,7 +363,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { self.videoEncodingIsFinished = true } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { glFinish() if previousVideoStartTime == nil { @@ -384,8 +381,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur // since the framebuffers come in much slower than during synchronized encoding. sharedImageProcessingContext.runOperationAsynchronously(work) - } - else { + } else { // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. 
@@ -444,7 +440,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { print("MovieOutput WARNING: Unable to create pixel buffer, dropping frame") continue } - try renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) + try renderIntoPixelBuffer(pixelBuffer!, framebuffer: framebuffer) guard assetWriterVideoInput.isReadyForMoreMediaData || shouldWaitForEncoding else { print("MovieOutput WARNING: Had to drop a frame at time \(frameTime)") continue @@ -506,7 +502,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { return false } - func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { + func renderIntoPixelBuffer(_ pixelBuffer: CVPixelBuffer, framebuffer: Framebuffer) throws { // Is this the first pixel buffer we have recieved? // NOTE: this will cause strange frame brightness blinking for the first few seconds, be careful about using this. if renderFramebuffer == nil && !disablePixelBufferAttachments { @@ -516,7 +512,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } let bufferSize = GLSize(self.size) - var cachedTextureRef:CVOpenGLESTexture? = nil + var cachedTextureRef: CVOpenGLESTexture? let ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) if ret != kCVReturnSuccess { print("MovieOutput ret error: \(ret), pixelBuffer: \(pixelBuffer)") @@ -524,23 +520,23 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) 
- renderFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: bufferSize, textureOnly: false, overriddenTexture: cachedTexture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) - CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + renderQuadWithShader(colorSwizzlingShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: sharedImageProcessingContext) if sharedImageProcessingContext.supportsTextureCaches() { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) } - CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) } // MARK: Append buffer directly from CMSampleBuffer - public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + public func processVideoBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { let work = { [weak self] in _ = self?._processVideoSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: 
shouldInvalidateSampleWhenDone) } @@ -590,7 +586,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { previousVideoStartTime = frameTime - guard (assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding) else { + guard assetWriterVideoInput.isReadyForMoreMediaData || self.shouldWaitForEncoding else { print("MovieOutput Had to drop a frame at time \(frameTime)") continue } @@ -629,7 +625,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { try NSObject.catchException { appendResult = bufferInput.append(buffer, withPresentationTime: frameTime) } - if (!appendResult) { + if !appendResult { print("MovieOutput WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: assetWriter.error))") continue } @@ -651,7 +647,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard assetWriter.status != .writing && assetWriter.status != .completed else { throw MovieOutputError.activeAudioTrackError } - assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) + assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: self.audioSettings, sourceFormatHint: self.audioSourceFormatHint) let assetWriter = self.assetWriter let audioInpupt = self.assetWriterAudioInput! 
try NSObject.catchException { @@ -660,7 +656,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { let work = { [weak self] in _ = self?._processAudioSampleBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: shouldInvalidateSampleWhenDone) } @@ -724,8 +720,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { CMSampleBufferInvalidate(audioBuffer) } processedBufferCount += 1 - } - catch { + } catch { print("MovieOutput WARNING: Trouble appending audio sample buffer: \(error)") continue } @@ -782,20 +777,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } func synchronizedEncodingDebugPrint(_ string: String) { - if(synchronizedEncodingDebug && !encodingLiveVideo) { print(string) } + if synchronizedEncodingDebug && !encodingLiveVideo { print(string) } } } - public extension Timestamp { - init(_ time:CMTime) { + init(_ time: CMTime) { self.value = time.value self.timescale = time.timescale - self.flags = TimestampFlags(rawValue:time.flags.rawValue) + self.flags = TimestampFlags(rawValue: time.flags.rawValue) self.epoch = time.epoch } - var asCMTime:CMTime { + var asCMTime: CMTime { get { return CMTimeMakeWithEpoch(value: value, timescale: timescale, epoch: epoch) } diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 7240640c..2a1d18b9 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -71,7 +71,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public var hasTarget: Bool { targets.count > 0 } - var framebufferUserInfo: [AnyHashable:Any]? + var framebufferUserInfo: [AnyHashable: Any]? 
var observations = [NSKeyValueObservation]() struct SeekingInfo: Equatable { @@ -100,7 +100,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } private var didTriggerEndTimeObserver = false private var didRegisterPlayerNotification = false - private var didNotifyEndedItem: AVPlayerItem? = nil + private var didNotifyEndedItem: AVPlayerItem? private var retryPlaying = false /// Return the current item. If currentItem was played to end, will return next one public var actualCurrentItem: AVPlayerItem? { @@ -374,7 +374,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { // https://developer.apple.com/library/archive/qa/qa1820/_index.html#//apple_ref/doc/uid/DTS40016828 guard !isSeeking, let seekingInfo = nextSeeking, isReadyToPlay else { return } isSeeking = true - seek(to: seekingInfo.time, toleranceBefore:seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] success in + seek(to: seekingInfo.time, toleranceBefore: seekingInfo.toleranceBefore, toleranceAfter: seekingInfo.toleranceAfter) { [weak self] _ in // debugPrint("movie player did seek to time:\(seekingInfo.time.seconds) success:\(success) shouldPlayAfterSeeking:\(seekingInfo.shouldPlayAfterSeeking)") guard let self = self else { return } if seekingInfo.shouldPlayAfterSeeking && self.isPlaying { @@ -401,7 +401,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { _timeObserversUpdate { [weak self] in guard let self = self else { return } self.totalTimeObservers.append(timeObserver) - self.totalTimeObservers = self.totalTimeObservers.sorted { (lhs, rhs) in + self.totalTimeObservers = self.totalTimeObservers.sorted { lhs, rhs in return lhs.targetTime > rhs.targetTime } if self.isPlaying { @@ -480,7 +480,7 @@ private extension MoviePlayer { func _setupPlayerItemVideoOutput(for item: AVPlayerItem) { guard !item.outputs.contains(where: { $0 is AVPlayerItemVideoOutput }) else { return } - let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : 
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] + let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] let videoOutput = AVPlayerItemVideoOutput(outputSettings: outputSettings) videoOutput.suppressesPlayerRendering = true item.add(videoOutput) @@ -667,7 +667,7 @@ private extension MoviePlayer { guard (notification.object as? AVPlayerItem) == currentItem else { return } didNotifyEndedItem = currentItem if needAddItemAfterDidEndNotify { - DispatchQueue.main.async() { [weak self] in + DispatchQueue.main.async { [weak self] in guard let self = self else { return } self.needAddItemAfterDidEndNotify = false self.pendingNewItems.forEach { self.insert($0, after: nil) } @@ -677,7 +677,7 @@ private extension MoviePlayer { } } } else { - DispatchQueue.main.async() { [weak self] in + DispatchQueue.main.async { [weak self] in self?.onCurrentItemPlayToEnd() } } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index bcbab83b..de8b128d 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -2,32 +2,31 @@ import OpenGLES import UIKit // TODO: Find a way to warn people if they set this after the context has been created -var imageProcessingShareGroup:EAGLSharegroup? = nil +var imageProcessingShareGroup: EAGLSharegroup? 
var dispatchQueKeyValueCounter = 81 public class OpenGLContext: SerialDispatch { - public private(set) lazy var framebufferCache:FramebufferCache = { - return FramebufferCache(context:self) + public private(set) lazy var framebufferCache: FramebufferCache = { + return FramebufferCache(context: self) }() - var shaderCache:[String:ShaderProgram] = [:] - public let standardImageVBO:GLuint - var textureVBOs:[Rotation:GLuint] = [:] + var shaderCache: [String: ShaderProgram] = [:] + public let standardImageVBO: GLuint + var textureVBOs: [Rotation: GLuint] = [:] - public let context:EAGLContext + public let context: EAGLContext - public private(set) lazy var passthroughShader:ShaderProgram = { - return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} + public private(set) lazy var passthroughShader: ShaderProgram = { + return crashOnShaderCompileFailure("OpenGLContext") { return try self.programForVertexShader(OneInputVertexShader, fragmentShader: PassthroughFragmentShader) } }() - public private(set) lazy var coreVideoTextureCache:CVOpenGLESTextureCache = { - var newTextureCache:CVOpenGLESTextureCache? = nil + public private(set) lazy var coreVideoTextureCache: CVOpenGLESTextureCache = { + var newTextureCache: CVOpenGLESTextureCache? let err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, self.context, nil, &newTextureCache) return newTextureCache! }() - - public let serialDispatchQueue:DispatchQueue + public let serialDispatchQueue: DispatchQueue public let dispatchQueueKey = DispatchSpecificKey() public let dispatchQueueKeyValue: Int public var executeStartTime: TimeInterval? @@ -38,14 +37,14 @@ public class OpenGLContext: SerialDispatch { init(queueLabel: String? = nil) { serialDispatchQueue = DispatchQueue(label: (queueLabel ?? 
"com.sunsetlakesoftware.GPUImage.processingQueue"), qos: .userInitiated) dispatchQueueKeyValue = dispatchQueKeyValueCounter - serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:dispatchQueueKeyValue) + serialDispatchQueue.setSpecific(key: dispatchQueueKey, value: dispatchQueueKeyValue) dispatchQueKeyValueCounter += 1 - let generatedContext:EAGLContext? + let generatedContext: EAGLContext? if let shareGroup = imageProcessingShareGroup { - generatedContext = EAGLContext(api:.openGLES2, sharegroup:shareGroup) + generatedContext = EAGLContext(api: .openGLES2, sharegroup: shareGroup) } else { - generatedContext = EAGLContext(api:.openGLES2) + generatedContext = EAGLContext(api: .openGLES2) } guard let concreteGeneratedContext = generatedContext else { @@ -55,7 +54,7 @@ public class OpenGLContext: SerialDispatch { self.context = concreteGeneratedContext EAGLContext.setCurrent(concreteGeneratedContext) - standardImageVBO = generateVBO(for:standardImageVertices) + standardImageVBO = generateVBO(for: standardImageVertices) generateTextureVBOs() glDisable(GLenum(GL_DEPTH_TEST)) @@ -66,8 +65,7 @@ public class OpenGLContext: SerialDispatch { // MARK: Rendering public func makeCurrentContext() { - if (EAGLContext.current() != self.context) - { + if EAGLContext.current() != self.context { EAGLContext.setCurrent(self.context) } } @@ -76,7 +74,6 @@ public class OpenGLContext: SerialDispatch { self.context.presentRenderbuffer(Int(GL_RENDERBUFFER)) } - // MARK: - // MARK: Device capabilities @@ -88,25 +85,25 @@ public class OpenGLContext: SerialDispatch { #endif } - public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } } - private lazy var _maximumTextureSizeForThisDevice:GLint = { + public var maximumTextureSizeForThisDevice: GLint { get { return _maximumTextureSizeForThisDevice } } + private lazy var _maximumTextureSizeForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE) }() - public var 
maximumTextureUnitsForThisDevice:GLint {get { return _maximumTextureUnitsForThisDevice } } - private lazy var _maximumTextureUnitsForThisDevice:GLint = { + public var maximumTextureUnitsForThisDevice: GLint { get { return _maximumTextureUnitsForThisDevice } } + private lazy var _maximumTextureUnitsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_IMAGE_UNITS) }() - public var maximumVaryingVectorsForThisDevice:GLint {get { return _maximumVaryingVectorsForThisDevice } } - private lazy var _maximumVaryingVectorsForThisDevice:GLint = { + public var maximumVaryingVectorsForThisDevice: GLint { get { return _maximumVaryingVectorsForThisDevice } } + private lazy var _maximumVaryingVectorsForThisDevice: GLint = { return self.openGLDeviceSettingForOption(GL_MAX_VARYING_VECTORS) }() - lazy var extensionString:String = { - return self.runOperationSynchronously{ + lazy var extensionString: String = { + return self.runOperationSynchronously { self.makeCurrentContext() - return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer.self)) + return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer.self)) } }() } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 1f4f7006..28cc9a1f 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -24,10 +24,10 @@ public enum PictureInputError: Error, CustomStringConvertible { public class PictureInput: ImageSource { public let targets = TargetContainer() - public private(set) var imageFramebuffer:Framebuffer? - public var framebufferUserInfo:[AnyHashable:Any]? + public private(set) var imageFramebuffer: Framebuffer? + public var framebufferUserInfo: [AnyHashable: Any]? 
public let imageName: String - var hasProcessedImage:Bool = false + var hasProcessedImage: Bool = false public init( image: CGImage, @@ -40,21 +40,21 @@ public class PictureInput: ImageSource { let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. - guard((widthOfImage > 0) && (heightOfImage > 0)) else { throw PictureInputError.zeroSizedImageError } + guard (widthOfImage > 0) && (heightOfImage > 0) else { throw PictureInputError.zeroSizedImageError } var widthToUseForTexture = widthOfImage var heightToUseForTexture = heightOfImage var shouldRedrawUsingCoreGraphics = false // For now, deal with images larger than the maximum texture size by resizing to be within that limit - let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width:Float(widthOfImage), height:Float(heightOfImage)))) - if ((scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage)) { + let scaledImageSizeToFitOnGPU = GLSize(sharedImageProcessingContext.sizeThatFitsWithinATextureForSize(Size(width: Float(widthOfImage), height: Float(heightOfImage)))) + if (scaledImageSizeToFitOnGPU.width != widthOfImage) && (scaledImageSizeToFitOnGPU.height != heightOfImage) { widthToUseForTexture = scaledImageSizeToFitOnGPU.width heightToUseForTexture = scaledImageSizeToFitOnGPU.height shouldRedrawUsingCoreGraphics = true } - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill let powerClosestToWidth = ceil(log2(Float(widthToUseForTexture))) let powerClosestToHeight = ceil(log2(Float(heightToUseForTexture))) @@ -64,33 +64,32 @@ public class PictureInput: ImageSource { shouldRedrawUsingCoreGraphics = true } - var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFData! 
+ var imageData: UnsafeMutablePointer! + var dataFromImageDataProvider: CFData! var format = GL_BGRA - if (!shouldRedrawUsingCoreGraphics) { + if !shouldRedrawUsingCoreGraphics { /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to * tell GL about the memory layout with GLES. */ - if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) - { + if (image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8) { shouldRedrawUsingCoreGraphics = true } else { /* Check that the bitmap pixel format is compatible with GL */ let bitmapInfo = image.bitmapInfo - if (bitmapInfo.contains(.floatComponents)) { + if bitmapInfo.contains(.floatComponents) { /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) - if (bitmapInfo.contains(.byteOrder32Little)) { + let alphaInfo = CGImageAlphaInfo(rawValue: bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if bitmapInfo.contains(.byteOrder32Little) { /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { + if (alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst) { shouldRedrawUsingCoreGraphics = true } - } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { + } else if (bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big)) { /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != 
CGImageAlphaInfo.noneSkipLast)) { + if (alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast) { shouldRedrawUsingCoreGraphics = true } else { /* Can access directly using GL_RGBA pixel format */ @@ -101,53 +100,53 @@ public class PictureInput: ImageSource { } } - try sharedImageProcessingContext.runOperationSynchronously{ - if (shouldRedrawUsingCoreGraphics) { + try sharedImageProcessingContext.runOperationSynchronously { + if shouldRedrawUsingCoreGraphics { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer.allocate(capacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) + let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) + imageContext?.draw(image, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly guard let data = image.dataProvider?.data else { 
throw PictureInputError.dataProviderNilError } dataFromImageDataProvider = data - imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) + imageData = UnsafeMutablePointer(mutating: CFDataGetBytePtr(dataFromImageDataProvider)) } // TODO: Alter orientation based on metadata from photo - self.imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) + self.imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: orientation, size: GLSize(width: widthToUseForTexture, height: heightToUseForTexture), textureOnly: true) self.imageFramebuffer!.lock() glBindTexture(GLenum(GL_TEXTURE_2D), self.imageFramebuffer!.texture) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR) } glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, widthToUseForTexture, heightToUseForTexture, 0, GLenum(format), GLenum(GL_UNSIGNED_BYTE), imageData) - if (smoothlyScaleOutput) { + if smoothlyScaleOutput { glGenerateMipmap(GLenum(GL_TEXTURE_2D)) } glBindTexture(GLenum(GL_TEXTURE_2D), 0) } - if (shouldRedrawUsingCoreGraphics) { + if shouldRedrawUsingCoreGraphics { imageData.deallocate() } } - public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { - try self.init(image:image.cgImage!, imageName:"UIImage", smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + public convenience init(image: UIImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + try self.init(image: image.cgImage!, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? 
image.imageOrientation.gpuOrientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation? = nil) throws { - guard let image = UIImage(named:imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } - try self.init(image:image.cgImage!, imageName: imageName, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation ?? image.imageOrientation.gpuOrientation) + public convenience init(imageName: String, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + guard let image = UIImage(named: imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } + try self.init(image: image.cgImage!, imageName: imageName, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? image.imageOrientation.gpuOrientation) } public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { @@ -170,7 +169,7 @@ public class PictureInput: ImageSource { newImage = newImage.transformed(by: scaleTransform) // Crop and generate image - let cropRect = CGRect(x: (newImage.extent.size.width - targetSize.width) / 2, y: (newImage.extent.size.height - targetSize.height)/2, width: targetSize.width, height: targetSize.height) + let cropRect = CGRect(x: (newImage.extent.size.width - targetSize.width) / 2, y: (newImage.extent.size.height - targetSize.height) / 2, width: targetSize.width, height: targetSize.height) let context = CIContext(options: nil) cgImage = context.createCGImage(newImage, from: cropRect)! 
@@ -181,23 +180,23 @@ public class PictureInput: ImageSource { } deinit { - //debugPrint("Deallocating operation: \(self)") + // debugPrint("Deallocating operation: \(self)") self.imageFramebuffer?.unlock() } - public func processImage(synchronously:Bool = false) { + public func processImage(synchronously: Bool = false) { self.imageFramebuffer?.userInfo = self.framebufferUserInfo if synchronously { - sharedImageProcessingContext.runOperationSynchronously{ + sharedImageProcessingContext.runOperationSynchronously { if let framebuffer = self.imageFramebuffer { self.updateTargetsWithFramebuffer(framebuffer) self.hasProcessedImage = true } } } else { - sharedImageProcessingContext.runOperationAsynchronously{ + sharedImageProcessingContext.runOperationAsynchronously { if let framebuffer = self.imageFramebuffer { self.updateTargetsWithFramebuffer(framebuffer) self.hasProcessedImage = true @@ -206,7 +205,7 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { // This gets called after the pipline gets adjusted and needs an image it // Disabled so we can adjust/prepare the pipline freely without worrying an old framebuffer will get pushed through it // If after changing the pipline you need the prior frame buffer to be reprocessed, call processImage() again. diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 81501a0b..35be3ac5 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -7,19 +7,19 @@ public enum PictureFileFormat { } public class PictureOutput: ImageConsumer { - public var encodedImageAvailableCallback:((Data) -> ())? - public var encodedImageFormat:PictureFileFormat = .png + public var encodedImageAvailableCallback: ((Data) -> Void)? 
+ public var encodedImageFormat: PictureFileFormat = .png public var encodedJPEGImageCompressionQuality: CGFloat = 0.8 - public var imageAvailableCallback:((UIImage) -> ())? - public var cgImageAvailableCallback:((CGImage) -> ())? - public var onlyCaptureNextFrame:Bool = true - public var keepImageAroundForSynchronousCapture:Bool = false + public var imageAvailableCallback: ((UIImage) -> Void)? + public var cgImageAvailableCallback: ((CGImage) -> Void)? + public var onlyCaptureNextFrame: Bool = true + public var keepImageAroundForSynchronousCapture: Bool = false public var exportWithAlpha = false - var storedFramebuffer:Framebuffer? + var storedFramebuffer: Framebuffer? public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - var url:URL! + public let maximumInputs: UInt = 1 + var url: URL! public init() { debugPrint("PictureOutput init") @@ -29,13 +29,13 @@ public class PictureOutput: ImageConsumer { debugPrint("PictureOutput deinit") } - public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { onlyCaptureNextFrame = true encodedImageFormat = format self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { - try imageData.write(to: self.url, options:.atomic) + try imageData.write(to: self.url, options: .atomic) } catch { // TODO: Handle this better print("WARNING: Couldn't save image with error:\(error)") @@ -44,25 +44,25 @@ public class PictureOutput: ImageConsumer { } // TODO: Replace with texture caches - func cgImageFromFramebuffer(_ framebuffer:Framebuffer) -> CGImage { - let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) + func cgImageFromFramebuffer(_ framebuffer: Framebuffer) -> CGImage { + let renderFramebuffer = 
sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.red) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings: ShaderUniformSettings(), vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) let data = UnsafeMutablePointer.allocate(capacity: imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() - guard let dataProvider = CGDataProvider(dataInfo:nil, data:data, size:imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not allocate a CGDataProvider")} + guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else { fatalError("Could not allocate a CGDataProvider") } let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() let bitmapInfo = exportWithAlpha ? CGBitmapInfo(rawValue: CGImageAlphaInfo.last.rawValue) : CGBitmapInfo() - return CGImage(width:Int(framebuffer.size.width), height:Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, bitmapInfo: bitmapInfo, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! 
+ return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: 4 * Int(framebuffer.size.width), space: defaultRGBColorSpace, bitmapInfo: bitmapInfo, provider: dataProvider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)! } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { if keepImageAroundForSynchronousCapture { storedFramebuffer?.unlock() storedFramebuffer = framebuffer @@ -82,7 +82,7 @@ public class PictureOutput: ImageConsumer { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) // TODO: Let people specify orientations - let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) + let image = UIImage(cgImage: cgImageFromBytes, scale: 1.0, orientation: .up) imageCallback(image) @@ -93,8 +93,8 @@ public class PictureOutput: ImageConsumer { if let imageCallback = encodedImageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) - let imageData:Data + let image = UIImage(cgImage: cgImageFromBytes, scale: 1.0, orientation: .up) + let imageData: Data switch encodedImageFormat { case .png: imageData = image.pngData()! // TODO: Better error handling here case .jpeg: imageData = image.jpegData(compressionQuality: encodedJPEGImageCompressionQuality)! @@ -109,12 +109,12 @@ public class PictureOutput: ImageConsumer { } public func synchronousImageCapture() -> UIImage { - var outputImage:UIImage! - sharedImageProcessingContext.runOperationSynchronously{ + var outputImage: UIImage! 
+ sharedImageProcessingContext.runOperationSynchronously { guard let currentFramebuffer = storedFramebuffer else { fatalError("Synchronous access requires keepImageAroundForSynchronousCapture to be set to true") } let cgImageFromBytes = cgImageFromFramebuffer(currentFramebuffer) - outputImage = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) + outputImage = UIImage(cgImage: cgImageFromBytes, scale: 1.0, orientation: .up) } return outputImage @@ -122,35 +122,35 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { + func saveNextFrameToURL(_ url: URL, format: PictureFileFormat) { let pictureOutput = PictureOutput() - pictureOutput.saveNextFrameToURL(url, format:format) + pictureOutput.saveNextFrameToURL(url, format: format) self --> pictureOutput } } public extension UIImage { - func filterWithOperation(_ operation:T) throws -> UIImage { - return try filterWithPipeline{input, output in + func filterWithOperation(_ operation: T) throws -> UIImage { + return try filterWithPipeline {input, output in input --> operation --> output } } - func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) throws -> UIImage { - let picture = try PictureInput(image:self) - var outputImage:UIImage? + func filterWithPipeline(_ pipeline: (PictureInput, PictureOutput) -> Void) throws -> UIImage { + let picture = try PictureInput(image: self) + var outputImage: UIImage? let pictureOutput = PictureOutput() pictureOutput.onlyCaptureNextFrame = true pictureOutput.imageAvailableCallback = {image in outputImage = image } pipeline(picture, pictureOutput) - picture.processImage(synchronously:true) + picture.processImage(synchronously: true) return outputImage! } } // Why are these flipped in the callback definition? 
-func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) { +func dataProviderReleaseCallback(_ context: UnsafeMutableRawPointer?, data: UnsafeRawPointer, size: Int) { data.deallocate() } diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 545d5a04..b0aaadd5 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -11,40 +11,40 @@ public protocol RenderViewDelegate: class { } // TODO: Add support for transparency -public class RenderView:UIView, ImageConsumer { - public weak var delegate:RenderViewDelegate? +public class RenderView: UIView, ImageConsumer { + public weak var delegate: RenderViewDelegate? public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio - public var orientation:ImageOrientation = .portrait + public var orientation: ImageOrientation = .portrait public var cropFrame: CGRect? - public var sizeInPixels:Size { Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor)) } + public var sizeInPixels: Size { Size(width: Float(frame.size.width * contentScaleFactor), height: Float(frame.size.height * contentScaleFactor)) } public let sources = SourceContainer() - public let maximumInputs:UInt = 1 - var displayFramebuffer:GLuint? - var displayRenderbuffer:GLuint? - var backingSize = GLSize(width:0, height:0) + public let maximumInputs: UInt = 1 + var displayFramebuffer: GLuint? + var displayRenderbuffer: GLuint? + var backingSize = GLSize(width: 0, height: 0) var renderSize = CGSize.zero private var isAppForeground: Bool = true - private lazy var displayShader:ShaderProgram = { + private lazy var displayShader: ShaderProgram = { return sharedImageProcessingContext.passthroughShader }() private var internalLayer: CAEAGLLayer! 
- required public init?(coder:NSCoder) { - super.init(coder:coder) + required public init?(coder: NSCoder) { + super.init(coder: coder) self.commonInit() } - public override init(frame:CGRect) { - super.init(frame:frame) + public override init(frame: CGRect) { + super.init(frame: frame) self.commonInit() } - override public class var layerClass:Swift.AnyClass { + override public class var layerClass: Swift.AnyClass { get { return CAEAGLLayer.self } @@ -69,7 +69,7 @@ public class RenderView:UIView, ImageConsumer { let eaglLayer = self.layer as! CAEAGLLayer eaglLayer.isOpaque = true - eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value:false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] + eaglLayer.drawableProperties = [kEAGLDrawablePropertyRetainedBacking: NSNumber(value: false), kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8] eaglLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill // Just for safety to prevent distortion NotificationCenter.default.addObserver(forName: UIApplication.didBecomeActiveNotification, object: nil, queue: .main) { [weak self] _ in @@ -103,12 +103,12 @@ public class RenderView:UIView, ImageConsumer { // Fix crash when calling OpenGL when app is not foreground guard isAppForeground else { return false } - var newDisplayFramebuffer:GLuint = 0 + var newDisplayFramebuffer: GLuint = 0 glGenFramebuffers(1, &newDisplayFramebuffer) displayFramebuffer = newDisplayFramebuffer glBindFramebuffer(GLenum(GL_FRAMEBUFFER), displayFramebuffer!) - var newDisplayRenderbuffer:GLuint = 0 + var newDisplayRenderbuffer: GLuint = 0 glGenRenderbuffers(1, &newDisplayRenderbuffer) displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) @@ -122,18 +122,18 @@ public class RenderView:UIView, ImageConsumer { // and then the view size would change to the new size at the next layout pass and distort our already drawn image. 
// Since we do not call this function often we do not need to worry about the performance impact of calling flush. CATransaction.flush() - sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.internalLayer) + sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from: self.internalLayer) - var backingWidth:GLint = 0 - var backingHeight:GLint = 0 + var backingWidth: GLint = 0 + var backingHeight: GLint = 0 glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_WIDTH), &backingWidth) glGetRenderbufferParameteriv(GLenum(GL_RENDERBUFFER), GLenum(GL_RENDERBUFFER_HEIGHT), &backingHeight) - backingSize = GLSize(width:backingWidth, height:backingHeight) + backingSize = GLSize(width: backingWidth, height: backingHeight) - guard (backingWidth > 0 && backingHeight > 0) else { + guard backingWidth > 0 && backingHeight > 0 else { print("WARNING: View had a zero size") - if(self.internalLayer.bounds.width > 0 && self.internalLayer.bounds.height > 0) { + if self.internalLayer.bounds.width > 0 && self.internalLayer.bounds.height > 0 { print("WARNING: View size \(self.internalLayer.bounds) may be too large ") } return false @@ -142,8 +142,8 @@ public class RenderView:UIView, ImageConsumer { glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), displayRenderbuffer!) 
let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)) - if (status != GLenum(GL_FRAMEBUFFER_COMPLETE)) { - print("WARNING: Display framebuffer creation failed with error: \(FramebufferCreationError(errorCode:status))") + if status != GLenum(GL_FRAMEBUFFER_COMPLETE) { + print("WARNING: Display framebuffer creation failed with error: \(FramebufferCreationError(errorCode: status))") return false } @@ -181,17 +181,16 @@ public class RenderView:UIView, ImageConsumer { glViewport(0, 0, backingSize.width, backingSize.height) } - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { let cleanup: () -> Void = { [weak self] in guard let self = self else { return } - if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false) { + if self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false { DispatchQueue.main.async { self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) framebuffer.unlock() } - } - else { + } else { self.delegate?.didDisplayFramebuffer(renderView: self, framebuffer: framebuffer) framebuffer.unlock() } @@ -203,7 +202,7 @@ public class RenderView:UIView, ImageConsumer { // Fix crash when calling OpenGL when app is not foreground guard self.isAppForeground else { return } - if (self.displayFramebuffer == nil && !self.createDisplayFramebuffer()) { + if self.displayFramebuffer == nil && !self.createDisplayFramebuffer() { cleanup() // Bail if we couldn't successfully create the displayFramebuffer return @@ -226,8 +225,8 @@ public class RenderView:UIView, ImageConsumer { inputTexture = framebuffer.texturePropertiesForTargetOrientation(self.orientation) } - let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(self.orientation), toFitSize:self.backingSize) - 
renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[inputTexture]) + let scaledVertices = self.fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize: framebuffer.sizeForTargetOrientation(self.orientation), toFitSize: self.backingSize) + renderQuadWithShader(self.displayShader, vertices: scaledVertices, inputTextures: [inputTexture]) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.displayRenderbuffer!) @@ -236,7 +235,7 @@ public class RenderView:UIView, ImageConsumer { cleanup() } - if(self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false) { + if self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false { // CAUTION: Never call sync from the sharedImageProcessingContext, it will cause cyclic thread deadlocks // If you are curious, change this to sync, then try trimming/scrubbing a video // Before that happens you will get a deadlock when someone calls runOperationSynchronously since the main thread is blocked @@ -246,8 +245,7 @@ public class RenderView:UIView, ImageConsumer { sharedImageProcessingContext.runOperationAsynchronously(work) } - } - else { + } else { self.delegate?.willDisplayFramebuffer(renderView: self, framebuffer: framebuffer) work() diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift index 17e9285d..23d1d7f8 100644 --- a/framework/Source/iOS/SpeakerOutput.swift +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -12,7 +12,6 @@ import AudioToolbox import AVFoundation public class SpeakerOutput: AudioEncodingTarget { - public var changesAudioSession = true public private(set) var isPlaying = false @@ -30,24 +29,23 @@ public class SpeakerOutput: AudioEncodingTarget { } } - var processingGraph:AUGraph? - var mixerUnit:AudioUnit? + var processingGraph: AUGraph? + var mixerUnit: AudioUnit? 
var firstBufferReached = false - let outputBus:AudioUnitElement = 0 - let inputBus:AudioUnitElement = 1 + let outputBus: AudioUnitElement = 0 + let inputBus: AudioUnitElement = 1 let unitSize = UInt32(MemoryLayout.size) - let bufferUnit:UInt32 = 655360 + let bufferUnit: UInt32 = 655360 var circularBuffer = TPCircularBuffer() - let circularBufferSize:UInt32 + let circularBufferSize: UInt32 - var rescueBuffer:UnsafeMutableRawPointer? - let rescueBufferSize:Int - var rescueBufferContentsSize:UInt32 = 0 - + var rescueBuffer: UnsafeMutableRawPointer? + let rescueBufferSize: Int + var rescueBufferContentsSize: UInt32 = 0 public init() { circularBufferSize = bufferUnit * unitSize @@ -70,7 +68,7 @@ public class SpeakerOutput: AudioEncodingTarget { // MARK: Playback control public func start() { - if(isPlaying || processingGraph == nil) { return } + if isPlaying || processingGraph == nil { return } AUGraphStart(processingGraph!) @@ -78,7 +76,7 @@ public class SpeakerOutput: AudioEncodingTarget { } public func cancel() { - if(!isPlaying || processingGraph == nil) { return } + if !isPlaying || processingGraph == nil { return } AUGraphStop(processingGraph!) 
@@ -94,12 +92,11 @@ public class SpeakerOutput: AudioEncodingTarget { // MARK: AudioEncodingTarget protocol public func activateAudioTrack() throws { - if(changesAudioSession) { + if changesAudioSession { do { try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.ambient) try AVAudioSession.sharedInstance().setActive(true) - } - catch { + } catch { print("ERROR: Unable to set audio session: \(error)") } } @@ -144,7 +141,7 @@ public class SpeakerOutput: AudioEncodingTarget { // Get a link to the mixer AU so we can talk to it later AUGraphNodeInfo(processingGraph!, mixerNode, nil, &mixerUnit) - var elementCount:UInt32 = 1 + var elementCount: UInt32 = 1 AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, UInt32(MemoryLayout.size)) // Set output callback, this is how audio sample data will be retrieved @@ -182,14 +179,14 @@ public class SpeakerOutput: AudioEncodingTarget { public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) { defer { - if(shouldInvalidateSampleWhenDone) { + if shouldInvalidateSampleWhenDone { CMSampleBufferInvalidate(sampleBuffer) } } - if(!isReadyForMoreMediaData || !isPlaying) { return } + if !isReadyForMoreMediaData || !isPlaying { return } - if(!firstBufferReached) { + if !firstBufferReached { firstBufferReached = true // Get the format information of the sample let desc = CMSampleBufferGetFormatDescription(sampleBuffer)! @@ -216,7 +213,7 @@ public class SpeakerOutput: AudioEncodingTarget { // Populate an AudioBufferList with the sample var audioBufferList = AudioBufferList() - var blockBuffer:CMBlockBuffer? + var blockBuffer: CMBlockBuffer? 
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, bufferListSizeNeededOut: nil, bufferListOut: &audioBufferList, bufferListSize: MemoryLayout.size, blockBufferAllocator: nil, blockBufferMemoryAllocator: nil, flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, blockBufferOut: &blockBuffer) // This is actually doing audioBufferList.mBuffers[0] @@ -232,14 +229,13 @@ public class SpeakerOutput: AudioEncodingTarget { // The circular buffer has not been proceseed quickly enough and has filled up. // Disable reading any further samples and save this last buffer so we don't lose it. - if(!didCopyBytes) { - //print("TPCircularBuffer limit reached: \(sampleSize) Bytes") + if !didCopyBytes { + // print("TPCircularBuffer limit reached: \(sampleSize) Bytes") isReadyForMoreMediaData = false self.writeToRescueBuffer(audioBuffer.mData, sampleSize) - } - else { + } else { hasBuffer = true } } @@ -252,15 +248,14 @@ public class SpeakerOutput: AudioEncodingTarget { // MARK: Rescue buffer func writeToRescueBuffer(_ src: UnsafeRawPointer!, _ size: UInt32) { - if(rescueBufferContentsSize > 0) { + if rescueBufferContentsSize > 0 { print("WARNING: Writing to rescue buffer with contents already inside") } - if(size > rescueBufferSize) { + if size > rescueBufferSize { print("WARNING: Unable to allocate enought space for rescue buffer, dropping audio sample") - } - else { - if(rescueBuffer == nil) { + } else { + if rescueBuffer == nil { rescueBuffer = malloc(rescueBufferSize) } @@ -270,9 +265,9 @@ public class SpeakerOutput: AudioEncodingTarget { } func copyRescueBufferContentsToCircularBuffer() { - if(rescueBufferContentsSize > 0) { + if rescueBufferContentsSize > 0 { let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, rescueBuffer, rescueBufferContentsSize) - if(!didCopyBytes) { + if !didCopyBytes { print("WARNING: Unable to copy rescue buffer into main buffer, dropping audio sample") } rescueBufferContentsSize = 0 @@ -281,13 +276,12 @@ public 
class SpeakerOutput: AudioEncodingTarget { } func playbackCallback( - inRefCon:UnsafeMutableRawPointer, - ioActionFlags:UnsafeMutablePointer, - inTimeStamp:UnsafePointer, - inBusNumber:UInt32, - inNumberFrames:UInt32, - ioData:UnsafeMutablePointer?) -> OSStatus { - + inRefCon: UnsafeMutableRawPointer, + ioActionFlags: UnsafeMutablePointer, + inTimeStamp: UnsafePointer, + inBusNumber: UInt32, + inNumberFrames: UInt32, + ioData: UnsafeMutablePointer?) -> OSStatus { let audioBuffer = ioData!.pointee.mBuffers let numberOfChannels = audioBuffer.mNumberChannels let outSamples = audioBuffer.mData @@ -297,25 +291,25 @@ func playbackCallback( let p = bridgeRawPointer(inRefCon) as! SpeakerOutput - if(p.hasBuffer && p.isPlaying) { - var availableBytes:UInt32 = 0 + if p.hasBuffer && p.isPlaying { + var availableBytes: UInt32 = 0 let bufferTail = TPCircularBufferTail(&p.circularBuffer, &availableBytes) let requestedBytesSize = inNumberFrames * p.unitSize * numberOfChannels let bytesToRead = min(availableBytes, requestedBytesSize) - if(!p.isMuted) { + if !p.isMuted { // Copy the bytes from the circular buffer into the outSample memcpy(outSamples, bufferTail, Int(bytesToRead)) } // Clear what we just read out of the circular buffer TPCircularBufferConsume(&p.circularBuffer, bytesToRead) - if(availableBytes <= requestedBytesSize*2) { + if availableBytes <= requestedBytesSize * 2 { p.isReadyForMoreMediaData = true } - if(availableBytes <= requestedBytesSize) { + if availableBytes <= requestedBytesSize { p.hasBuffer = false } } @@ -323,11 +317,10 @@ func playbackCallback( return noErr } -func bridgeObject(_ obj : AnyObject) -> UnsafeMutableRawPointer { +func bridgeObject(_ obj: AnyObject) -> UnsafeMutableRawPointer { return UnsafeMutableRawPointer(Unmanaged.passUnretained(obj).toOpaque()) } -func bridgeRawPointer(_ ptr : UnsafeMutableRawPointer) -> AnyObject { +func bridgeRawPointer(_ ptr: UnsafeMutableRawPointer) -> AnyObject { return Unmanaged.fromOpaque(ptr).takeUnretainedValue() 
} - diff --git a/framework/Tests/Pipeline_Tests.swift b/framework/Tests/Pipeline_Tests.swift index dc8bfba8..47a92257 100755 --- a/framework/Tests/Pipeline_Tests.swift +++ b/framework/Tests/Pipeline_Tests.swift @@ -1,35 +1,35 @@ import XCTest -//@testable import GPUImage +// @testable import GPUImage class FakeOperation: ImageProcessingOperation { let targets = TargetContainer() let sources = SourceContainer() - var maximumInputs:UInt { get { return 1 } } // Computed property, so it can be overridden - let name:String + var maximumInputs: UInt { get { return 1 } } // Computed property, so it can be overridden + let name: String - init(name:String) { + init(name: String) { self.name = name } - func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { } - func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { } } class FakeRenderView: ImageConsumer { let sources = SourceContainer() - let maximumInputs:UInt = 1 + let maximumInputs: UInt = 1 - func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { } } class FakeCamera: ImageSource { let targets = TargetContainer() - func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) { } func newCameraFrame() { @@ -46,19 +46,18 @@ class FakeCamera: ImageSource { } class Pipeline_Tests: XCTestCase { - func testTargetContainer() { let targetContainer = TargetContainer() // All operations have been added and should have a strong reference - var operation1:FakeOperation? = FakeOperation(name:"Operation 1") - targetContainer.append(operation1!, indexAtTarget:0) - var operation2:FakeOperation? 
= FakeOperation(name:"Operation 2") - targetContainer.append(operation2!, indexAtTarget:0) - var operation3:FakeOperation? = FakeOperation(name:"Operation 3") - targetContainer.append(operation3!, indexAtTarget:0) - var operation4:FakeOperation? = FakeOperation(name:"Operation 4") - targetContainer.append(operation4!, indexAtTarget:0) + var operation1: FakeOperation? = FakeOperation(name: "Operation 1") + targetContainer.append(operation1!, indexAtTarget: 0) + var operation2: FakeOperation? = FakeOperation(name: "Operation 2") + targetContainer.append(operation2!, indexAtTarget: 0) + var operation3: FakeOperation? = FakeOperation(name: "Operation 3") + targetContainer.append(operation3!, indexAtTarget: 0) + var operation4: FakeOperation? = FakeOperation(name: "Operation 4") + targetContainer.append(operation4!, indexAtTarget: 0) for (index, (target, _)) in targetContainer.enumerated() { let operation = target as! FakeOperation @@ -101,7 +100,6 @@ class Pipeline_Tests: XCTestCase { } func testSourceContainer() { - } func testChaining() { diff --git a/framework/Tests/ShaderProgram_Tests.swift b/framework/Tests/ShaderProgram_Tests.swift index 23e45408..34533da7 100755 --- a/framework/Tests/ShaderProgram_Tests.swift +++ b/framework/Tests/ShaderProgram_Tests.swift @@ -6,14 +6,12 @@ public let TestBrokenVertexShader = "attribute vec4 position;\n attribute vec4 i public let TestBrokenFragmentShader = "varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void ma)\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n " public let TestMismatchedFragmentShader = "varying vec2 textureCoordinateF;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n " - class ShaderProgram_Tests: XCTestCase { - func testExample() { sharedImageProcessingContext.makeCurrentContext() do { - let shaderProgram = try ShaderProgram(vertexShader:TestVertexShader, 
fragmentShader:TestFragmentShader) + let shaderProgram = try ShaderProgram(vertexShader: TestVertexShader, fragmentShader: TestFragmentShader) let temporaryPosition = shaderProgram.attributeIndex("position") XCTAssert(temporaryPosition != nil, "Could not find position attribute") XCTAssert(temporaryPosition == shaderProgram.attributeIndex("position"), "Could not retrieve the same position attribute") @@ -30,15 +28,15 @@ class ShaderProgram_Tests: XCTestCase { XCTFail("Should not have thrown error during shader compilation: \(error)") } - if ((try? ShaderProgram(vertexShader:TestBrokenVertexShader, fragmentShader:TestFragmentShader)) != nil) { + if (try? ShaderProgram(vertexShader: TestBrokenVertexShader, fragmentShader: TestFragmentShader)) != nil { XCTFail("Program should not have compiled correctly") } - if ((try? ShaderProgram(vertexShader:TestVertexShader, fragmentShader:TestBrokenFragmentShader)) != nil) { + if (try? ShaderProgram(vertexShader: TestVertexShader, fragmentShader: TestBrokenFragmentShader)) != nil { XCTFail("Program should not have compiled correctly") } - if ((try? ShaderProgram(vertexShader:TestVertexShader, fragmentShader:TestMismatchedFragmentShader)) != nil) { + if (try? 
ShaderProgram(vertexShader: TestVertexShader, fragmentShader: TestMismatchedFragmentShader)) != nil { XCTFail("Program should not have compiled correctly") } } From f629396966eb35fcbf2c06d3553c0d3c3e8b58ab Mon Sep 17 00:00:00 2001 From: Cokile Date: Fri, 5 Feb 2021 14:38:43 +0800 Subject: [PATCH 307/332] improve(input): create picture input with extra transforms --- framework/Source/iOS/PictureInput.swift | 68 +++++++++++++++++++++---- 1 file changed, 59 insertions(+), 9 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 28cc9a1f..27fcbd30 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -1,6 +1,19 @@ import OpenGLES import UIKit +public enum PictureInputTransformStep { + case scale(x: CGFloat, y: CGFloat) + case translation(tx: CGFloat, ty: CGFloat) + case rotation(angle: CGFloat) + + var isTranslation: Bool { + switch self { + case .translation: return true + default: return false + } + } +} + public enum PictureInputError: Error, CustomStringConvertible { case zeroSizedImageError case dataProviderNilError @@ -149,27 +162,64 @@ public class PictureInput: ImageSource { try self.init(image: image.cgImage!, imageName: imageName, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? image.imageOrientation.gpuOrientation) } - public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, transforms: [[PictureInputTransformStep]]? = nil) throws { var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var cgImage: CGImage = image.cgImage! 
if let targetSize = size { autoreleasepool { // Get CIImage with orientation - guard var newImage = CIImage(image: image, options: - [.applyOrientationProperty: true, - .properties: [kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue]]) else { - return - } + guard var newImage = CIImage( + image: image, + options: [ + .applyOrientationProperty: true, + .properties: [ + kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue + ] + ] + ) else { return } // Scale let ratioW = targetSize.width / image.size.width let ratioH = targetSize.height / image.size.height let fillRatio = max(ratioW, ratioH) - let scaleTransform = CGAffineTransform(scaleX: fillRatio, y: fillRatio) - newImage = newImage.transformed(by: scaleTransform) + newImage = newImage.transformed(by: CGAffineTransform(scaleX: fillRatio, y: fillRatio)) + + var scaleX: CGFloat = 1 + var scaleY: CGFloat = 1 + var translationX: CGFloat = 0 + var translationY: CGFloat = 0 + if let stepGroups = transforms, !stepGroups.isEmpty { + var extraTransform = CGAffineTransform.identity + for group in stepGroups { + var groupTransform = CGAffineTransform.identity + for step in group { + switch step { + case let .scale(x, y): + if group.contains(where: { $0.isTranslation }) { + scaleX *= x + scaleY *= y + } + groupTransform = groupTransform.concatenating(.init(scaleX: x, y: y)) + case let .translation(tx, ty): + translationX += (tx * scaleX * targetSize.width) + translationY += (ty * scaleY * targetSize.height) + groupTransform = groupTransform.concatenating(.init(translationX: translationX, y: translationY)) + case let .rotation(angle): + groupTransform = groupTransform.concatenating(.init(rotationAngle: angle)) + } + } + extraTransform = extraTransform.concatenating(groupTransform) + } + newImage = newImage.transformed(by: extraTransform) + } // Crop and generate image - let cropRect = CGRect(x: (newImage.extent.size.width - targetSize.width) / 2, y: 
(newImage.extent.size.height - targetSize.height) / 2, width: targetSize.width, height: targetSize.height) + let cropRect = CGRect( + x: newImage.extent.origin.x - translationX + (newImage.extent.size.width - targetSize.width) / 2, + y: newImage.extent.origin.y + translationY + (newImage.extent.size.height - targetSize.height) / 2, + width: targetSize.width, + height: targetSize.height + ) let context = CIContext(options: nil) cgImage = context.createCGImage(newImage, from: cropRect)! From 1541ebb195627adafeff60958d1f518ce3b9f778 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 8 Mar 2021 16:00:39 +0800 Subject: [PATCH 308/332] fix: fix camera doesn't stop sometimes --- framework/Source/iOS/Camera.swift | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 0eaaf363..31a41db3 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -438,8 +438,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - public func stopCapture() { - if captureSession.isRunning { + public func stopCapture(force: Bool = false) { + // NOTE: Sometime camera is actually running, but isRunning is false. When it happens, set force to true. 
+ if captureSession.isRunning || force { captureSession.stopRunning() } } From 9155a6ea12ba1073b7b7ea06c9fea2ff02972b08 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 12 Mar 2021 13:33:59 +0800 Subject: [PATCH 309/332] chore: lint code style --- framework/Source/BasicOperation.swift | 2 +- framework/Source/Operations/AverageLuminanceThreshold.swift | 2 +- framework/Source/Operations/HarrisCornerDetector.swift | 2 +- framework/Source/Operations/MotionDetector.swift | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index f39e83cb..afab7696 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -20,7 +20,7 @@ open class BasicOperation: ImageProcessingOperation { public var mask: ImageSource? { didSet { if let mask = mask { - maskImageRelay.newImageCallback = {[weak self] framebuffer in + maskImageRelay.newImageCallback = { [weak self] framebuffer in self?.maskFramebuffer?.unlock() framebuffer.lock() self?.maskFramebuffer = framebuffer diff --git a/framework/Source/Operations/AverageLuminanceThreshold.swift b/framework/Source/Operations/AverageLuminanceThreshold.swift index fbf2d76c..f0cc3a2d 100644 --- a/framework/Source/Operations/AverageLuminanceThreshold.swift +++ b/framework/Source/Operations/AverageLuminanceThreshold.swift @@ -7,7 +7,7 @@ public class AverageLuminanceThreshold: OperationGroup { public override init() { super.init() - averageLuminance.extractedLuminanceCallback = {[weak self] luminance in + averageLuminance.extractedLuminanceCallback = { [weak self] luminance in self?.luminanceThreshold.threshold = (self?.thresholdMultiplier ?? 
1.0) * luminance } diff --git a/framework/Source/Operations/HarrisCornerDetector.swift b/framework/Source/Operations/HarrisCornerDetector.swift index d8ea06e2..d14fb6a0 100644 --- a/framework/Source/Operations/HarrisCornerDetector.swift +++ b/framework/Source/Operations/HarrisCornerDetector.swift @@ -44,7 +44,7 @@ public class HarrisCornerDetector: OperationGroup { ({ sensitivity = 5.0 })() ({ threshold = 0.2 })() - outputImageRelay.newImageCallback = {[weak self] framebuffer in + outputImageRelay.newImageCallback = { [weak self] framebuffer in if let cornersDetectedCallback = self?.cornersDetectedCallback { cornersDetectedCallback(extractCornersFromImage(framebuffer)) } diff --git a/framework/Source/Operations/MotionDetector.swift b/framework/Source/Operations/MotionDetector.swift index 11abecfc..a047e3d7 100644 --- a/framework/Source/Operations/MotionDetector.swift +++ b/framework/Source/Operations/MotionDetector.swift @@ -9,7 +9,7 @@ public class MotionDetector: OperationGroup { public override init() { super.init() - averageColorExtractor.extractedColorCallback = {[weak self] color in + averageColorExtractor.extractedColorCallback = { [weak self] color in self?.motionDetectedCallback?(Position(color.redComponent / color.alphaComponent, color.greenComponent / color.alphaComponent), color.alphaComponent) } From c1967f894273b0a9fa31229e26225000ada1fce7 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Wed, 3 Mar 2021 14:20:19 +0800 Subject: [PATCH 310/332] improve(movie-player): remove init thread check. 
--- framework/Source/iOS/MoviePlayer.swift | 2 -- 1 file changed, 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 2a1d18b9..967e5663 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -123,8 +123,6 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public override init() { print("[MoviePlayer] init") - // Make sure player it intialized on the main thread, or it might cause KVO crash - assert(Thread.isMainThread) super.init() } From 4c8cffdfcf6fb3e5494058dbad7c6c674fb3b38d Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Thu, 25 Mar 2021 14:37:57 +0800 Subject: [PATCH 311/332] chore(player): adjust for streaming. --- framework/Source/iOS/MoviePlayer.swift | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 967e5663..2b1a6fd3 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -56,7 +56,11 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public var dropFrameBeforeTime: CMTime? public var playrate: Float = 1.0 public var assetDuration: CMTime { - return asset?.duration ?? .zero + if asset?.statusOfValue(forKey: "duration", error: nil) == .loaded { + return asset?.duration ?? 
.zero + } else { + return .zero + } } public var isReadyToPlay: Bool { return status == .readyToPlay @@ -123,6 +127,8 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public override init() { print("[MoviePlayer] init") + // Make sure player it intialized on the main thread, or it might cause KVO crash + assert(Thread.isMainThread) super.init() } @@ -288,7 +294,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } isPlaying = true isProcessing = false - print("[MoviePlayer] start duration:\(String(describing: asset?.duration.seconds)) items:\(items())") +// print("[MoviePlayer] start duration:\(String(describing: asset?.duration.seconds)) items:\(items())") _setupDisplayLinkIfNeeded() _resetTimeObservers() didNotifyEndedItem = nil @@ -562,8 +568,8 @@ private extension MoviePlayer { } func playerItemStatusDidChange(_ playerItem: AVPlayerItem) { - debugPrint("[MoviePlayer] PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset)") - if playerItem == currentItem { + debugPrint("[MoviePlayer] PlayerItem status change to:\(playerItem.status.rawValue) asset:\(playerItem.asset), error: \(playerItem.error)") + if playerItem == currentItem && playerItem.status == .readyToPlay { resumeIfNeeded() } } From 0f91c02e4a3db19ae0dead9bfb539e5aa848f531 Mon Sep 17 00:00:00 2001 From: RoCry Date: Thu, 25 Mar 2021 18:13:40 +0800 Subject: [PATCH 312/332] feat: add inputTime --- framework/Source/OpenGLRendering.swift | 4 ++++ framework/Source/ShaderProgram.swift | 12 ++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index aa3ac8c4..b57c3ae4 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -104,6 +104,10 @@ public func renderQuadWithShader(_ shader: ShaderProgram, uniformSettings: Shade shader.setValue(GLint(index), forUniform: "inputImageTexture".withNonZeroSuffix(index)) } + + if let 
initTime = shader.initTime { + shader.setValue(GLfloat(CACurrentMediaTime() - initTime), forUniform: "inputTime") + } glDrawArrays(GLenum(GL_TRIANGLE_STRIP), 0, 4) diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index ab4be862..035f4a1d 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -27,6 +27,7 @@ public class ShaderProgram { public var colorUniformsUseFourComponents = false public static var disableAttributeCache: Bool = false let program: GLuint + let initTime: CFTimeInterval? var vertexShader: GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets var fragmentShader: GLuint! private var attributeAddresses = [String: GLuint]() @@ -43,10 +44,17 @@ public class ShaderProgram { self.vertexShader = try compileShader(vertexShader, type: .vertex) self.fragmentShader = try compileShader(fragmentShader, type: .fragment) - + + // tricky way to control if needs set inputTime + if fragmentShader.contains("uniform float inputTime") { + self.initTime = CACurrentMediaTime() + } else { + self.initTime = nil + } + glAttachShader(program, self.vertexShader) glAttachShader(program, self.fragmentShader) - + try link() } From 2cd3e6340b4db10f74db298b74e56d5c624dfd11 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 12 Apr 2021 16:27:25 +0800 Subject: [PATCH 313/332] improve: make sure LUT intensity is always within valid range --- framework/Source/Operations/LookupFilter.swift | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/framework/Source/Operations/LookupFilter.swift b/framework/Source/Operations/LookupFilter.swift index 836c9ca1..81e56620 100755 --- a/framework/Source/Operations/LookupFilter.swift +++ b/framework/Source/Operations/LookupFilter.swift @@ -1,7 +1,16 @@ // PictureInput isn't defined yet on Linux, so this operation is inoperable there #if !os(Linux) public class LookupFilter: 
BasicOperation { - public var intensity: Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } + public var intensity: Float = 1.0 { + didSet { + if intensity < 0 || intensity > 1.0 { + assertionFailure("LookupFilter intensity:\(intensity) is out of valid range [0, 1.0]") + intensity = min(max(intensity, 0), 1.0) + return + } + uniformSettings["intensity"] = intensity + } + } public var lookupImage: PictureInput? { // TODO: Check for retain cycles in all cases here didSet { lookupImage?.addTarget(self, atTargetIndex: 1) From a58e321f5fa985f5164da4627ffec307990a4808 Mon Sep 17 00:00:00 2001 From: Herry Zhong Date: Wed, 14 Apr 2021 14:37:21 +0800 Subject: [PATCH 314/332] improve(picture): add a PictureInput init method for only display image region in a big image --- framework/Source/iOS/PictureInput.swift | 54 ++++++++++++++++++++----- 1 file changed, 43 insertions(+), 11 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 27fcbd30..886ebaa8 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -18,6 +18,7 @@ public enum PictureInputError: Error, CustomStringConvertible { case zeroSizedImageError case dataProviderNilError case noSuchImageError(imageName: String) + case createImageError public var errorDescription: String { switch self { @@ -27,6 +28,8 @@ public enum PictureInputError: Error, CustomStringConvertible { return "Unable to retrieve image dataProvider" case .noSuchImageError(let imageName): return "No such image named: \(imageName) in your application bundle" + case .createImageError: + return "Fail to create image" } } @@ -41,6 +44,7 @@ public class PictureInput: ImageSource { public var framebufferUserInfo: [AnyHashable: Any]? 
public let imageName: String var hasProcessedImage: Bool = false + private static var ciContext = CIContext(options: nil) public init( image: CGImage, @@ -48,7 +52,7 @@ public class PictureInput: ImageSource { smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) throws { self.imageName = imageName ?? "CGImage" - + let widthOfImage = GLint(image.width) let heightOfImage = GLint(image.height) @@ -162,21 +166,47 @@ public class PictureInput: ImageSource { try self.init(image: image.cgImage!, imageName: imageName, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? image.imageOrientation.gpuOrientation) } + public convenience init(image: UIImage, imageSize: CGSize, renderTargetSize: CGSize, renderTargetOffset: CGPoint, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { + var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation + var cgImage: CGImage = image.cgImage! + try autoreleasepool { + let options: [CIImageOption : Any] = [.applyOrientationProperty : true, + .properties : [kCGImagePropertyOrientation : image.imageOrientation.cgImageOrientation.rawValue]] + var newImage = CIImage(cgImage: cgImage, options: options) + // scale to image size + let ratioW = imageSize.width / image.size.width + let ratioH = imageSize.height / image.size.height + let fillRatio = max(ratioW, ratioH) + newImage = newImage.transformed(by: CGAffineTransform(scaleX: fillRatio, y: fillRatio)) + let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) + // crop image to target display frame + newImage = newImage.cropped(to: displayFrame) + guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: newImage.extent) else { + throw PictureInputError.createImageError + } + cgImage = newCgImage + targetOrientation = orientation ?? 
.portrait + } + try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) + } + public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, transforms: [[PictureInputTransformStep]]? = nil) throws { var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var cgImage: CGImage = image.cgImage! if let targetSize = size { - autoreleasepool { + try autoreleasepool { // Get CIImage with orientation guard var newImage = CIImage( - image: image, - options: [ - .applyOrientationProperty: true, - .properties: [ - kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue - ] + image: image, + options: [ + .applyOrientationProperty: true, + .properties: [ + kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue ] - ) else { return } + ] + ) else { + throw PictureInputError.createImageError + } // Scale let ratioW = targetSize.width / image.size.width @@ -221,8 +251,10 @@ public class PictureInput: ImageSource { height: targetSize.height ) - let context = CIContext(options: nil) - cgImage = context.createCGImage(newImage, from: cropRect)! + guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: cropRect) else { + throw PictureInputError.createImageError + } + cgImage = newCgImage targetOrientation = orientation ?? 
.portrait } } From 3f44c94074f2ea18892c52c857de1f77c0dd69f8 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 18 Apr 2021 17:19:33 +0800 Subject: [PATCH 315/332] improve(PictureInput): support UIImage that associated with CIImage so that it can reduce image rendering to CGImage --- framework/Source/iOS/PictureInput.swift | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 886ebaa8..cd2099bd 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -192,19 +192,22 @@ public class PictureInput: ImageSource { public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, transforms: [[PictureInputTransformStep]]? = nil) throws { var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation - var cgImage: CGImage = image.cgImage! + var croppedCGImage: CGImage? if let targetSize = size { try autoreleasepool { // Get CIImage with orientation - guard var newImage = CIImage( - image: image, - options: [ + let ciImage: CIImage? + if let associatedCIImage = image.ciImage { + ciImage = associatedCIImage + } else { + ciImage = CIImage(image: image, options: [ .applyOrientationProperty: true, .properties: [ kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue ] - ] - ) else { + ]) + } + guard var newImage = ciImage else { throw PictureInputError.createImageError } @@ -254,9 +257,14 @@ public class PictureInput: ImageSource { guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: cropRect) else { throw PictureInputError.createImageError } - cgImage = newCgImage + croppedCGImage = newCgImage targetOrientation = orientation ?? .portrait } + } else { + croppedCGImage = image.cgImage! 
+ } + guard let cgImage = croppedCGImage else { + throw PictureInputError.createImageError } try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) } From bdfa5dc53821cdd612fa08ecf530f65fc38ae2ab Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 27 Apr 2021 14:48:04 +0800 Subject: [PATCH 316/332] improve: add more render info and make PictureInput to print debug info everytime when debugging --- framework/Source/BasicOperation.swift | 23 +++++ framework/Source/Framebuffer.swift | 6 ++ framework/Source/ImageGenerator.swift | 4 + framework/Source/OpenGLRendering.swift | 4 + framework/Source/OperationGroup.swift | 8 ++ framework/Source/Operations/ImageBuffer.swift | 4 + .../Source/Operations/LookupFilter.swift | 3 + framework/Source/Pipeline.swift | 26 ++++++ framework/Source/RawDataInput.swift | 4 + framework/Source/RawDataOutput.swift | 18 ++++ framework/Source/Size.swift | 4 + framework/Source/TextureInput.swift | 4 + framework/Source/TextureOutput.swift | 18 ++++ framework/Source/iOS/Camera.swift | 16 ++++ framework/Source/iOS/MovieCache.swift | 3 + framework/Source/iOS/MovieInput.swift | 14 +++ framework/Source/iOS/MovieOutput.swift | 21 ++++- framework/Source/iOS/MoviePlayer.swift | 16 ++++ framework/Source/iOS/OpenGLContext.swift | 9 ++ framework/Source/iOS/PictureInput.swift | 89 ++++++++++++++++++- framework/Source/iOS/PictureOutput.swift | 18 ++++ framework/Source/iOS/RenderView.swift | 21 ++++- 22 files changed, 328 insertions(+), 5 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index afab7696..b5aab8d8 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -49,6 +49,10 @@ open class BasicOperation: ImageProcessingOperation { let maskImageRelay = ImageRelay() var maskFramebuffer: Framebuffer? 
+ #if DEBUG + public var debugRenderInfo: String = "" + #endif + // MARK: - // MARK: Initialization and teardown @@ -104,6 +108,25 @@ open class BasicOperation: ImageProcessingOperation { } open func renderFrame() { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + var inputsDebugInfo = "" + for framebuffer in inputFramebuffers { + inputsDebugInfo.append("\(framebuffer.value.debugRenderInfo), ") + } + debugRenderInfo = """ +{ + \(Self.self): { + inputs: [ + \(inputsDebugInfo) + ], + output: { size: \(renderFramebuffer.debugRenderInfo), time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + } + #endif renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil: mask != nil) let textureProperties = initialTextureProperties() diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 107c6680..879d5abf 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -198,6 +198,12 @@ public class Framebuffer: Hashable { public func hash(into hasher: inout Hasher) { hasher.combine(id) } + + #if DEBUG + public var debugRenderInfo: String { + "{ size: \(size.width)x\(size.height) }" + } + #endif } func hashForFramebufferWithProperties(orientation: ImageOrientation, size: GLSize, textureOnly: Bool = false, minFilter: Int32 = GL_LINEAR, magFilter: Int32 = GL_LINEAR, wrapS: Int32 = GL_CLAMP_TO_EDGE, wrapT: Int32 = GL_CLAMP_TO_EDGE, internalFormat: Int32 = GL_RGBA, format: Int32 = GL_BGRA, type: Int32 = GL_UNSIGNED_BYTE, stencil: Bool = false) -> Int64 { diff --git a/framework/Source/ImageGenerator.swift b/framework/Source/ImageGenerator.swift index 3a0893c9..604f7032 100644 --- a/framework/Source/ImageGenerator.swift +++ b/framework/Source/ImageGenerator.swift @@ -1,5 +1,9 @@ public class ImageGenerator: ImageSource { public var size: Size + + #if 
DEBUG + public var debugRenderInfo: String { "{ ImageGenerator: { output: \(imageFramebuffer.debugRenderInfo) } }," } + #endif public let targets = TargetContainer() var imageFramebuffer: Framebuffer! diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index b57c3ae4..aeb9ff87 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -50,6 +50,10 @@ public struct GLSize { self.width = size.glWidth() self.height = size.glHeight() } + + #if DEBUG + public var debugRenderInfo: String { "\(width)x\(height)" } + #endif } extension Size { diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index 791a9b1d..3571c019 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -6,6 +6,14 @@ open class OperationGroup: ImageProcessingOperation { public var targets: TargetContainer { get { return outputImageRelay.targets } } public let maximumInputs: UInt = 1 + #if DEBUG + public var debugRenderInfo: String = "" + + public func debugGetOnePassRenderInfos() -> String { + return inputImageRelay.debugGetOnePassRenderInfos() + } + #endif + public init() { } diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index fa09905e..a2deb0df 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -7,6 +7,10 @@ public class ImageBuffer: ImageProcessingOperation { public let targets = TargetContainer() public let sources = SourceContainer() var bufferedFramebuffers = [Framebuffer]() + + #if DEBUG + public var debugRenderInfo: String { "" } + #endif public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { bufferedFramebuffers.append(framebuffer) diff --git a/framework/Source/Operations/LookupFilter.swift b/framework/Source/Operations/LookupFilter.swift index 81e56620..a0e847f9 100755 --- 
a/framework/Source/Operations/LookupFilter.swift +++ b/framework/Source/Operations/LookupFilter.swift @@ -14,6 +14,9 @@ public class LookupFilter: BasicOperation { public var lookupImage: PictureInput? { // TODO: Check for retain cycles in all cases here didSet { lookupImage?.addTarget(self, atTargetIndex: 1) + #if DEBUG + lookupImage?.printDebugRenderInfos = true + #endif lookupImage?.processImage() } } diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 12b364e0..217456d7 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -6,12 +6,19 @@ public var _needCheckFilterContainerThread: Bool? public protocol ImageSource: AnyObject { var _needCheckSourceThread: Bool { get } + #if DEBUG + var debugRenderInfo: String { get } + func debugGetOnePassRenderInfos() -> String + #endif var targets: TargetContainer { get } func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) } public protocol ImageConsumer: AnyObject { var _needCheckConsumerThread: Bool { get } + #if DEBUG + var debugRenderInfo: String { get } + #endif var maximumInputs: UInt { get } var sources: SourceContainer { get } @@ -100,6 +107,21 @@ public extension ImageSource { target.newFramebufferAvailable(framebuffer, fromSourceIndex: index) } } + + #if DEBUG + func debugGetOnePassRenderInfos() -> String { + var renderInfos = "" + renderInfos.append(debugRenderInfo) + for target in targets { + if let source = target.0 as? 
ImageSource { + renderInfos.append(source.debugGetOnePassRenderInfos()) + } else { + renderInfos.append(target.0.debugRenderInfo) + } + } + return renderInfos + } + #endif } public extension ImageConsumer { @@ -312,6 +334,10 @@ public class ImageRelay: ImageProcessingOperation { target.newFramebufferAvailable(framebuffer, fromSourceIndex: index) } } + + #if DEBUG + public var debugRenderInfo: String = "" + #endif } public protocol DebugPipelineNameable { diff --git a/framework/Source/RawDataInput.swift b/framework/Source/RawDataInput.swift index a61dbba9..5e0e5560 100644 --- a/framework/Source/RawDataInput.swift +++ b/framework/Source/RawDataInput.swift @@ -32,6 +32,10 @@ public enum PixelFormat { public class RawDataInput: ImageSource { public let targets = TargetContainer() + #if DEBUG + public var debugRenderInfo: String = "" + #endif + public init() { } diff --git a/framework/Source/RawDataOutput.swift b/framework/Source/RawDataOutput.swift index e2fb1290..cd5b5069 100644 --- a/framework/Source/RawDataOutput.swift +++ b/framework/Source/RawDataOutput.swift @@ -17,12 +17,30 @@ public class RawDataOutput: ImageConsumer { public let sources = SourceContainer() public let maximumInputs: UInt = 1 + + #if DEBUG + public var debugRenderInfo: String = "" + #endif public init() { } // TODO: Replace with texture caches public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +{ + RawDataOutput: { + input: \(framebuffer.debugRenderInfo), + output: { size: \(framebuffer.size.width * framebuffer.size.height * 4), type: RGBData }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: framebuffer.orientation, size: framebuffer.size) renderFramebuffer.lock() diff --git a/framework/Source/Size.swift 
b/framework/Source/Size.swift index 5bee3434..1e22cefa 100644 --- a/framework/Source/Size.swift +++ b/framework/Source/Size.swift @@ -6,4 +6,8 @@ public struct Size { self.width = width self.height = height } + + #if DEBUG + public var debugRenderInfo: String { "\(width)x\(height)" } + #endif } diff --git a/framework/Source/TextureInput.swift b/framework/Source/TextureInput.swift index da83022a..855c0deb 100644 --- a/framework/Source/TextureInput.swift +++ b/framework/Source/TextureInput.swift @@ -15,6 +15,10 @@ public class TextureInput: ImageSource { public let targets = TargetContainer() + #if DEBUG + public var debugRenderInfo: String = "" + #endif + let textureFramebuffer: Framebuffer public init(texture: GLuint, size: Size, orientation: ImageOrientation = .portrait) { diff --git a/framework/Source/TextureOutput.swift b/framework/Source/TextureOutput.swift index 7523c298..48881767 100644 --- a/framework/Source/TextureOutput.swift +++ b/framework/Source/TextureOutput.swift @@ -18,7 +18,25 @@ public class TextureOutput: ImageConsumer { public let sources = SourceContainer() public let maximumInputs: UInt = 1 + #if DEBUG + public var debugRenderInfo: String = "" + #endif + public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +{ + TextureOutput: { + input: \(framebuffer.debugRenderInfo), + output: { size: \(framebuffer.size.width * framebuffer.size.height * 4) type: TextureCallback }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif newTextureAvailableCallback?(framebuffer.texture) // TODO: Maybe extend the lifetime of the texture past this if needed framebuffer.unlock() diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 31a41db3..d925ac60 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -127,6 +127,10 @@ public class Camera: 
NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var lastCheckTime = CACurrentMediaTime() var captureSessionRestartAttempts = 0 + + #if DEBUG + public var debugRenderInfo: String = "" + #endif public init(sessionPreset: AVCaptureSession.Preset, cameraDevice: AVCaptureDevice? = nil, location: PhysicalCameraLocation = .backFacing, captureAsYUV: Bool = true, photoOutput: AVCapturePhotoOutput? = nil, metadataDelegate: AVCaptureMetadataOutputObjectsDelegate? = nil, metadataObjectTypes: [AVMetadataObject.ObjectType]? = nil, deviceType: AVCaptureDevice.DeviceType = .builtInWideAngleCamera) throws { debugPrint("camera init") @@ -395,6 +399,18 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + #if DEBUG + self.debugRenderInfo = """ +{ + Camera: { + input: \(bufferWidth)x\(bufferHeight), input_type: CMSampleBuffer, + output: { size: \(cameraFramebuffer.debugRenderInfo) }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + #endif + cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) diff --git a/framework/Source/iOS/MovieCache.swift b/framework/Source/iOS/MovieCache.swift index fa2ebe6e..5a49de8d 100644 --- a/framework/Source/iOS/MovieCache.swift +++ b/framework/Source/iOS/MovieCache.swift @@ -54,6 +54,9 @@ public class MovieCache: ImageConsumer, AudioEncodingTarget { return movieOutput.writerStatus == .unknown } private var startingVideoID: String? 
+ #if DEBUG + public var debugRenderInfo: String = "{ MovieCache: passthrough }," + #endif public init() { print("MovieCache init") diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index f8842226..dcbc71e3 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -81,6 +81,10 @@ public class MovieInput: ImageSource { var movieFramebuffer: Framebuffer? public var framebufferUserInfo: [AnyHashable: Any]? + #if DEBUG + public var debugRenderInfo: String = "" + #endif + // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works public init(asset: AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = false, loop: Bool = false, playrate: Double = 1.0, audioSettings: [String: Any]? = nil, maxFPS: Float? = nil) throws { debugPrint("movie input init \(asset)") @@ -465,6 +469,16 @@ public class MovieInput: ImageSource { print("Cannot generate framebuffer from YUVBuffer") return } + #if DEBUG + debugRenderInfo = """ +{ + MovieInput: { + input: \(CVPixelBufferGetWidth(movieFrame))x\(CVPixelBufferGetHeight(movieFrame)), input_type: CVPixelBuffer, + output: { size: \(framebuffer.debugRenderInfo), time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + #endif framebuffer.userInfo = framebufferUserInfo self.movieFramebuffer = framebuffer self.updateTargetsWithFramebuffer(framebuffer) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index deb4014e..947dc563 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -9,7 +9,7 @@ public protocol AudioEncodingTarget { func readyForNextAudioBuffer() -> Bool } -public protocol MovieOutputDelegate: class { +public protocol MovieOutputDelegate: AnyObject { func movieOutputDidStartWriting(_ movieOutput: MovieOutput, at time: CMTime) func movieOutputWriterError(_ movieOutput: MovieOutput, error: 
Error) } @@ -125,6 +125,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } var preferredTransform: CGAffineTransform? private var isProcessing = false + #if DEBUG + public var debugRenderInfo: String = "" + #endif public init(URL: Foundation.URL, fps: Double, size: Size, needAlignAV: Bool = true, fileType: AVFileType = .mov, liveVideo: Bool = false, videoSettings: [String: Any]? = nil, videoNaturalTimeScale: CMTimeScale? = nil, optimizeForNetworkUse: Bool = false, disablePixelBufferAttachments: Bool = true, audioSettings: [String: Any]? = nil, audioSourceFormatHint: CMFormatDescription? = nil) throws { print("movie output init \(URL)") @@ -407,6 +410,22 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { _decideStartTime() } + #if DEBUG + let startTime = CACurrentMediaTime() + let bufferCount = frameBufferCache.count + defer { + debugRenderInfo = """ +{ + MovieOutput: { + input: \(framebuffer.debugRenderInfo), input_count: \(bufferCount), + output: { size: \(size.debugRenderInfo), type: AVAssetWriter }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif + var processedBufferCount = 0 for framebuffer in frameBufferCache { defer { framebuffer.unlock() } diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 2b1a6fd3..123bce55 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -78,6 +78,10 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { var framebufferUserInfo: [AnyHashable: Any]? 
var observations = [NSKeyValueObservation]() + #if DEBUG + public var debugRenderInfo: String = "" + #endif + struct SeekingInfo: Equatable { let time: CMTime let toleranceBefore: CMTime @@ -620,6 +624,18 @@ private extension MoviePlayer { guard hasTarget, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: timeForDisplay, videoOrientation: videoOrientation) else { return } framebuffer.userInfo = framebufferUserInfo + #if DEBUG + debugRenderInfo = """ +{ + MoviePlayer: { + input: \(CVPixelBufferGetWidth(pixelBuffer))x\(CVPixelBufferGetHeight(pixelBuffer)), input_type: CVPixelBuffer, + output: \(framebuffer.debugRenderInfo), + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + #endif + updateTargetsWithFramebuffer(framebuffer) } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index de8b128d..010db2b4 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -30,6 +30,7 @@ public class OpenGLContext: SerialDispatch { public let dispatchQueueKey = DispatchSpecificKey() public let dispatchQueueKeyValue: Int public var executeStartTime: TimeInterval? 
+ public lazy var _debugPipelineOnePassRenderInfo = [String]() // MARK: - // MARK: Initialization and teardown @@ -106,4 +107,12 @@ public class OpenGLContext: SerialDispatch { return String(cString: unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to: UnsafePointer.self)) } }() + + public func debugResetOnePassRenderingInfo() { + _debugPipelineOnePassRenderInfo.removeAll() + } + + public func debugAppendRenderingInfo(_ info: String) { + _debugPipelineOnePassRenderInfo.append(info) + } } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index cd2099bd..ad638eae 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -45,12 +45,33 @@ public class PictureInput: ImageSource { public let imageName: String var hasProcessedImage: Bool = false private static var ciContext = CIContext(options: nil) + #if DEBUG + public var printDebugRenderInfos = true + public var debugRenderInfo: String = "" + #endif public init( image: CGImage, imageName: String? = nil, smoothlyScaleOutput: Bool = false, - orientation: ImageOrientation = .portrait) throws { + orientation: ImageOrientation = .portrait, + preprocessRenderInfo: String = "") throws { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +\(preprocessRenderInfo) +{ + PictureInput: { + input: \(image.width)x\(image.height), input_type: CGImage, + output: { size: \(imageFramebuffer?.debugRenderInfo ?? "") }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms + } +}, +""" + } + #endif + self.imageName = imageName ?? "CGImage" let widthOfImage = GLint(image.width) @@ -167,6 +188,10 @@ public class PictureInput: ImageSource { } public convenience init(image: UIImage, imageSize: CGSize, renderTargetSize: CGSize, renderTargetOffset: CGPoint, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? 
= nil) throws { + #if DEBUG + let startTime = CACurrentMediaTime() + #endif + var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var cgImage: CGImage = image.cgImage! try autoreleasepool { @@ -187,10 +212,30 @@ public class PictureInput: ImageSource { cgImage = newCgImage targetOrientation = orientation ?? .portrait } - try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) + + let preprocessRenderInfo: String + #if DEBUG + preprocessRenderInfo = """ +{ + PictureInput_pre_process : { + input: { + size: \(image.size.debugRenderInfo), type: UIImage, imageSize:\(imageSize.debugRenderInfo), renderTargetSize: \(renderTargetSize.debugRenderInfo), renderTargetOffset: \(renderTargetOffset.debugDescription) + }, + output: { size: \(cgImage.width)x\(cgImage.height), type: CGImage }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms +}, +""" + #else + preprocessRenderInfo = "" + #endif + + try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation, preprocessRenderInfo: preprocessRenderInfo) } public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, transforms: [[PictureInputTransformStep]]? = nil) throws { + #if DEBUG + let startTime = CACurrentMediaTime() + #endif var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var croppedCGImage: CGImage? if let targetSize = size { @@ -260,13 +305,37 @@ public class PictureInput: ImageSource { croppedCGImage = newCgImage targetOrientation = orientation ?? 
.portrait } + } else if image.imageOrientation != .up, + let ciImage = CIImage(image: image, + options: [.applyOrientationProperty: true, + .properties: [ kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue ]]), + let rotatedImage = PictureInput.ciContext.createCGImage(ciImage, from: ciImage.extent) { + // Rotated correct orientation + croppedCGImage = rotatedImage } else { croppedCGImage = image.cgImage! } guard let cgImage = croppedCGImage else { throw PictureInputError.createImageError } - try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation) + + let preprocessRenderInfo: String + #if DEBUG + preprocessRenderInfo = """ +{ + PictureInput_pre_process : { + input: { + size: \(image.size.debugRenderInfo), type: UIImage, size:\(size?.debugRenderInfo ?? ""), transforms: \(String(describing: transforms)) + }, + output: { size: \(cgImage.width)x\(cgImage.height), type: CGImage }, + time: \((CACurrentMediaTime() - startTime) * 1000.0)ms +}, +""" + #else + preprocessRenderInfo = "" + #endif + + try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation, preprocessRenderInfo: preprocessRenderInfo) } deinit { @@ -284,6 +353,11 @@ public class PictureInput: ImageSource { self.updateTargetsWithFramebuffer(framebuffer) self.hasProcessedImage = true } + #if DEBUG + if self.printDebugRenderInfos { + debugPrint(self.debugGetOnePassRenderInfos()) + } + #endif } } else { sharedImageProcessingContext.runOperationAsynchronously { @@ -291,6 +365,11 @@ public class PictureInput: ImageSource { self.updateTargetsWithFramebuffer(framebuffer) self.hasProcessedImage = true } + #if DEBUG + if self.printDebugRenderInfos { + debugPrint(self.debugGetOnePassRenderInfos()) + } + #endif } } } @@ -315,4 +394,8 @@ public extension CGSize { return CGSize(width: height, height: width) } } + + #if DEBUG + var debugRenderInfo: String { 
"\(width)x\(height)" } + #endif } diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 35be3ac5..a95a7b88 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -21,6 +21,10 @@ public class PictureOutput: ImageConsumer { public let maximumInputs: UInt = 1 var url: URL! + #if DEBUG + public var debugRenderInfo: String = "" + #endif + public init() { debugPrint("PictureOutput init") } @@ -63,6 +67,20 @@ public class PictureOutput: ImageConsumer { } public func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) { + #if DEBUG + let startTime = CACurrentMediaTime() + defer { + debugRenderInfo = """ +{ + PictureOutput: { + input: \(framebuffer.debugRenderInfo), + output: { type: ImageOutput, time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + } + #endif + if keepImageAroundForSynchronousCapture { storedFramebuffer?.unlock() storedFramebuffer = framebuffer diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index b0aaadd5..fa3e2c20 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -1,6 +1,6 @@ import UIKit -public protocol RenderViewDelegate: class { +public protocol RenderViewDelegate: AnyObject { func willDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) func didDisplayFramebuffer(renderView: RenderView, framebuffer: Framebuffer) // Only use this if you need to do layout in willDisplayFramebuffer before the framebuffer actually gets displayed @@ -33,6 +33,9 @@ public class RenderView: UIView, ImageConsumer { }() private var internalLayer: CAEAGLLayer! 
+ #if DEBUG + public var debugRenderInfo: String = "" + #endif required public init?(coder: NSCoder) { super.init(coder: coder) @@ -207,6 +210,11 @@ public class RenderView: UIView, ImageConsumer { // Bail if we couldn't successfully create the displayFramebuffer return } + + #if DEBUG + let startTime = CACurrentMediaTime() + #endif + self.activateDisplayFramebuffer() clearFramebufferWithColor(self.backgroundRenderColor) @@ -233,6 +241,17 @@ public class RenderView: UIView, ImageConsumer { sharedImageProcessingContext.presentBufferForDisplay() cleanup() + + #if DEBUG + self.debugRenderInfo = """ +{ + RenderView: { + input: \(framebuffer.debugRenderInfo), + output: { size: \(self.backingSize.debugRenderInfo), time: \((CACurrentMediaTime() - startTime) * 1000.0)ms } + } +}, +""" + #endif } if self.delegate?.shouldDisplayNextFramebufferAfterMainThreadLoop() ?? false { From e7477f7e3cddfa082d0086bc7c62bc2a0d7c99c5 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 27 Apr 2021 19:31:46 +0800 Subject: [PATCH 317/332] chore: add debugRenderInfo for OperationGroup --- framework/Source/OperationGroup.swift | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index 3571c019..edfa5af0 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -10,7 +10,10 @@ open class OperationGroup: ImageProcessingOperation { public var debugRenderInfo: String = "" public func debugGetOnePassRenderInfos() -> String { - return inputImageRelay.debugGetOnePassRenderInfos() + return """ +{ OperationGroup: \(Self.self), subfilters: \(inputImageRelay.debugGetOnePassRenderInfos()) +} +""" } #endif From ae1ef5b20f965847c07df8e7000aee38bbef2e5d Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 27 Apr 2021 20:38:51 +0800 Subject: [PATCH 318/332] improve(PictureInput): fix scaled image might have a white line on edges --- framework/Source/iOS/PictureInput.swift | 19 
+++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index ad638eae..086ebe02 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -202,7 +202,7 @@ public class PictureInput: ImageSource { let ratioW = imageSize.width / image.size.width let ratioH = imageSize.height / image.size.height let fillRatio = max(ratioW, ratioH) - newImage = newImage.transformed(by: CGAffineTransform(scaleX: fillRatio, y: fillRatio)) + newImage = newImage.scaled(by: fillRatio, roundRect: true) let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) // crop image to target display frame newImage = newImage.cropped(to: displayFrame) @@ -260,7 +260,7 @@ public class PictureInput: ImageSource { let ratioW = targetSize.width / image.size.width let ratioH = targetSize.height / image.size.height let fillRatio = max(ratioW, ratioH) - newImage = newImage.transformed(by: CGAffineTransform(scaleX: fillRatio, y: fillRatio)) + newImage = newImage.scaled(by: fillRatio, roundRect: true) var scaleX: CGFloat = 1 var scaleY: CGFloat = 1 @@ -399,3 +399,18 @@ public extension CGSize { var debugRenderInfo: String { "\(width)x\(height)" } #endif } + +private extension CIImage { + func scaled(by scaleRatio: CGFloat, roundRect: Bool) -> CIImage { + let scaleTransform = CGAffineTransform(scaleX: scaleRatio, y: scaleRatio) + // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect + let transformedRect = extent.applying(scaleTransform) + let scaledImage = transformed(by: scaleTransform) + if roundRect { + let originRoundedImage = scaledImage.transformed(by: CGAffineTransform(translationX: transformedRect.origin.x.rounded(.towardZero) - 
transformedRect.origin.x, y: transformedRect.origin.y.rounded(.towardZero) - transformedRect.origin.y)) + return originRoundedImage + } else { + return scaledImage + } + } +} From a0189d26a57fe775dfc89bf7fe40f45773beee79 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 29 Apr 2021 00:31:24 +0800 Subject: [PATCH 319/332] chore: update code style by lint --- .../FilterShowcase/FilterOperations.swift | 126 +++++++++--------- .../FilterShowcaseWindowController.swift | 2 +- .../FilterDisplayViewController.swift | 2 +- .../SimpleVideoFilter/ViewController.swift | 8 +- framework/Source/BasicOperation.swift | 2 +- framework/Source/Mac/Camera.swift | 2 +- framework/Source/Mac/MovieInput.swift | 2 +- framework/Source/iOS/Camera.swift | 2 +- framework/Source/iOS/PictureInput.swift | 4 +- 9 files changed, 75 insertions(+), 75 deletions(-) diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index ce850fd9..182a00c9 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -6,7 +6,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { AlphaBlend() }, listName: "Highlights Blur", titleName: "Gaussian Blur Lumi>0.6(alpha)", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.8), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.8), sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, @@ -24,7 +24,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { AlphaBlend() }, listName: "Soft Focus", titleName: "Gaussian Blur + Alpha Blend", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, @@ 
-43,7 +43,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SaturationAdjustment() }, listName: "Saturation", titleName: "Saturation", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.saturation = sliderValue }, @@ -53,7 +53,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ContrastAdjustment() }, listName: "Contrast", titleName: "Contrast", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 4.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.contrast = sliderValue }, @@ -63,7 +63,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { BrightnessAdjustment() }, listName: "Brightness", titleName: "Brightness", - sliderConfiguration: .enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.0), + sliderConfiguration: .enabled(minimumValue: -1.0, maximumValue: 1.0, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.brightness = sliderValue }, @@ -73,7 +73,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { LevelsAdjustment() }, listName: "Levels", titleName: "Levels", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.minimum = Color(red: Float(sliderValue), green: Float(sliderValue), blue: Float(sliderValue)) filter.middle = Color(red: 1.0, green: 1.0, blue: 1.0) @@ -87,7 +87,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ExposureAdjustment() }, listName: "Exposure", titleName: "Exposure", - sliderConfiguration: .enabled(minimumValue:-4.0, maximumValue:4.0, initialValue:0.0), + 
sliderConfiguration: .enabled(minimumValue: -4.0, maximumValue: 4.0, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.exposure = sliderValue }, @@ -97,7 +97,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { RGBAdjustment() }, listName: "RGB", titleName: "RGB", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.green = sliderValue }, @@ -107,7 +107,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { HueAdjustment() }, listName: "Hue", titleName: "Hue", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:360.0, initialValue:90.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 360.0, initialValue: 90.0), sliderUpdateCallback: {filter, sliderValue in filter.hue = sliderValue }, @@ -117,7 +117,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { WhiteBalance() }, listName: "White balance", titleName: "White Balance", - sliderConfiguration: .enabled(minimumValue:2500.0, maximumValue:7500.0, initialValue:5000.0), + sliderConfiguration: .enabled(minimumValue: 2500.0, maximumValue: 7500.0, initialValue: 5000.0), sliderUpdateCallback: {filter, sliderValue in filter.temperature = sliderValue }, @@ -127,7 +127,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { MonochromeFilter() }, listName: "Monochrome", titleName: "Monochrome", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, @@ -150,7 +150,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Sharpen() }, listName: "Sharpen", titleName: "Sharpen", - sliderConfiguration: .enabled(minimumValue:-1.0, 
maximumValue:4.0, initialValue:0.0), + sliderConfiguration: .enabled(minimumValue: -1.0, maximumValue: 4.0, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.sharpness = sliderValue }, @@ -160,7 +160,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { UnsharpMask() }, listName: "Unsharp mask", titleName: "Unsharp Mask", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 5.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, @@ -170,7 +170,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { TransformOperation() }, listName: "Transform (2-D)", titleName: "Transform (2-D)", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 6.28, initialValue: 0.75), sliderUpdateCallback: {filter, sliderValue in filter.transform = Matrix4x4(CGAffineTransform(rotationAngle: CGFloat(sliderValue))) }, @@ -180,7 +180,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { TransformOperation() }, listName: "Transform (3-D)", titleName: "Transform (3-D)", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 6.28, initialValue: 0.75), sliderUpdateCallback: {filter, sliderValue in var perspectiveTransform = CATransform3DIdentity perspectiveTransform.m34 = 0.4 @@ -195,7 +195,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Crop() }, listName: "Crop", titleName: "Crop", - sliderConfiguration: .enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), + sliderConfiguration: .enabled(minimumValue: 240.0, maximumValue: 480.0, initialValue: 240.0), sliderUpdateCallback: {filter, sliderValue in filter.cropSizeInPixels = Size(width: 480.0, height: 
sliderValue) }, @@ -205,7 +205,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ResizeCrop() }, listName: "ResizeCrop", titleName: "ResizeCrop", - sliderConfiguration: .enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0), + sliderConfiguration: .enabled(minimumValue: 240.0, maximumValue: 480.0, initialValue: 240.0), sliderUpdateCallback: {filter, sliderValue in filter.cropSizeInPixels = Size(width: 480.0, height: sliderValue) }, @@ -231,7 +231,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { GammaAdjustment() }, listName: "Gamma", titleName: "Gamma", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:3.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 3.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.gamma = sliderValue }, @@ -242,7 +242,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { HighlightsAndShadows() }, listName: "Highlights and shadows", titleName: "Highlights and Shadows", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.highlights = sliderValue }, @@ -252,7 +252,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Haze() }, listName: "Haze / UV", titleName: "Haze / UV", - sliderConfiguration: .enabled(minimumValue:-0.2, maximumValue:0.2, initialValue:0.2), + sliderConfiguration: .enabled(minimumValue: -0.2, maximumValue: 0.2, initialValue: 0.2), sliderUpdateCallback: {filter, sliderValue in filter.distance = sliderValue }, @@ -262,7 +262,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SepiaToneFilter() }, listName: "Sepia tone", titleName: "Sepia Tone", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, 
maximumValue: 1.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, @@ -304,7 +304,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Solarize() }, listName: "Solarize", titleName: "Solarize", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, @@ -314,7 +314,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Vibrance() }, listName: "Vibrance", titleName: "Vibrance", - sliderConfiguration: .enabled(minimumValue:-1.2, maximumValue:1.2, initialValue:0.0), + sliderConfiguration: .enabled(minimumValue: -1.2, maximumValue: 1.2, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.vibrance = sliderValue }, @@ -324,7 +324,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { HighlightAndShadowTint() }, listName: "Highlight and shadow tint", titleName: "Highlight / Shadow Tint", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.shadowTintIntensity = sliderValue }, @@ -342,7 +342,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Histogram(type: .rgb) }, listName: "Histogram", titleName: "Histogram", - sliderConfiguration: .enabled(minimumValue:4.0, maximumValue:32.0, initialValue:16.0), + sliderConfiguration: .enabled(minimumValue: 4.0, maximumValue: 32.0, initialValue: 16.0), sliderUpdateCallback: {filter, sliderValue in filter.downsamplingFactor = UInt(round(sliderValue)) }, @@ -407,7 +407,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { LuminanceThreshold() }, listName: "Luminance threshold", titleName: "Luminance Threshold", - 
sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, @@ -417,7 +417,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { AdaptiveThreshold() }, listName: "Adaptive threshold", titleName: "Adaptive Threshold", - sliderConfiguration: .enabled(minimumValue:1.0, maximumValue:20.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 1.0, maximumValue: 20.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, @@ -427,7 +427,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { AverageLuminanceThreshold() }, listName: "Average luminance threshold", titleName: "Avg. Lum. Threshold", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.thresholdMultiplier = sliderValue }, @@ -437,7 +437,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Pixellate() }, listName: "Pixellate", titleName: "Pixellate", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 0.3, initialValue: 0.05), sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, @@ -447,7 +447,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { PolarPixellate() }, listName: "Polar pixellate", titleName: "Polar Pixellate", - sliderConfiguration: .enabled(minimumValue:-0.1, maximumValue:0.1, initialValue:0.05), + sliderConfiguration: .enabled(minimumValue: -0.1, maximumValue: 0.1, initialValue: 0.05), sliderUpdateCallback: {filter, sliderValue in filter.pixelSize = Size(width: 
sliderValue, height: sliderValue) }, @@ -478,7 +478,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { PolkaDot() }, listName: "Polka dot", titleName: "Polka Dot", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 0.3, initialValue: 0.05), sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, @@ -488,7 +488,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Halftone() }, listName: "Halftone", titleName: "Halftone", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:0.05, initialValue:0.01), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 0.05, initialValue: 0.01), sliderUpdateCallback: {filter, sliderValue in filter.fractionalWidthOfAPixel = sliderValue }, @@ -498,7 +498,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Crosshatch() }, listName: "Crosshatch", titleName: "Crosshatch", - sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.06, initialValue:0.03), + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.06, initialValue: 0.03), sliderUpdateCallback: {filter, sliderValue in filter.crossHatchSpacing = sliderValue }, @@ -508,7 +508,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SobelEdgeDetection() }, listName: "Sobel edge detection", titleName: "Sobel Edge Detection", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.25), sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, @@ -518,7 +518,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { PrewittEdgeDetection() }, listName: "Prewitt edge detection", titleName: "Prewitt Edge Detection", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0), + 
sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, @@ -528,7 +528,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { CannyEdgeDetection() }, listName: "Canny edge detection", titleName: "Canny Edge Detection", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 4.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, @@ -538,7 +538,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ThresholdSobelEdgeDetection() }, listName: "Threshold edge detection", titleName: "Threshold Edge Detection", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.25), sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, @@ -548,7 +548,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { HarrisCornerDetector() }, listName: "Harris corner detector", titleName: "Harris Corner Detector", - sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.70, initialValue: 0.20), sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, @@ -579,7 +579,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { NobleCornerDetector() }, listName: "Noble corner detector", titleName: "Noble Corner Detector", - sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.70, initialValue: 0.20), sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, @@ -610,7 +610,7 @@ let filterOperations: 
[FilterOperationInterface] = [ filter: { ShiTomasiFeatureDetector() }, listName: "Shi-Tomasi feature detector", titleName: "Shi-Tomasi Feature Detector", - sliderConfiguration: .enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderConfiguration: .enabled(minimumValue: 0.01, maximumValue: 0.70, initialValue: 0.20), sliderUpdateCallback: {filter, sliderValue in filter.threshold = sliderValue }, @@ -650,7 +650,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { LowPassFilter() }, listName: "Low pass", titleName: "Low Pass", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.strength = sliderValue }, @@ -660,7 +660,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { HighPassFilter() }, listName: "High pass", titleName: "High Pass", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.strength = sliderValue }, @@ -672,7 +672,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SketchFilter() }, listName: "Sketch", titleName: "Sketch", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.edgeStrength = sliderValue }, @@ -682,7 +682,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ThresholdSketchFilter() }, listName: "Threshold Sketch", titleName: "Threshold Sketch", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.25), sliderUpdateCallback: 
{filter, sliderValue in filter.threshold = sliderValue }, @@ -700,7 +700,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SmoothToonFilter() }, listName: "Smooth toon", titleName: "Smooth Toon", - sliderConfiguration: .enabled(minimumValue:1.0, maximumValue:6.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 1.0, maximumValue: 6.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, @@ -710,7 +710,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { TiltShift() }, listName: "Tilt shift", titleName: "Tilt Shift", - sliderConfiguration: .enabled(minimumValue:0.2, maximumValue:0.8, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.2, maximumValue: 0.8, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.topFocusLevel = sliderValue - 0.1 filter.bottomFocusLevel = sliderValue + 0.1 @@ -729,7 +729,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Posterize() }, listName: "Posterize", titleName: "Posterize", - sliderConfiguration: .enabled(minimumValue:1.0, maximumValue:20.0, initialValue:10.0), + sliderConfiguration: .enabled(minimumValue: 1.0, maximumValue: 20.0, initialValue: 10.0), sliderUpdateCallback: {filter, sliderValue in filter.colorLevels = round(sliderValue) }, @@ -758,7 +758,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { EmbossFilter() }, listName: "Emboss", titleName: "Emboss", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 5.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.intensity = sliderValue }, @@ -776,7 +776,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ChromaKeying() }, listName: "Chroma key", titleName: "Chroma Key", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.00, initialValue:0.40), + 
sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.00, initialValue: 0.40), sliderUpdateCallback: {filter, sliderValue in filter.thresholdSensitivity = sliderValue }, @@ -798,7 +798,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { KuwaharaFilter() }, listName: "Kuwahara", titleName: "Kuwahara", - sliderConfiguration: .enabled(minimumValue:3.0, maximumValue:9.0, initialValue:3.0), + sliderConfiguration: .enabled(minimumValue: 3.0, maximumValue: 9.0, initialValue: 3.0), sliderUpdateCallback: {filter, sliderValue in filter.radius = Int(round(sliderValue)) }, @@ -816,7 +816,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { Vignette() }, listName: "Vignette", titleName: "Vignette", - sliderConfiguration: .enabled(minimumValue:0.5, maximumValue:0.9, initialValue:0.75), + sliderConfiguration: .enabled(minimumValue: 0.5, maximumValue: 0.9, initialValue: 0.75), sliderUpdateCallback: {filter, sliderValue in filter.end = sliderValue }, @@ -826,7 +826,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { GaussianBlur() }, listName: "Gaussian blur", titleName: "Gaussian Blur", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 40.0, initialValue: 2.0), sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, @@ -836,7 +836,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { BoxBlur() }, listName: "Box blur", titleName: "Box Blur", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 40.0, initialValue: 2.0), sliderUpdateCallback: {filter, sliderValue in filter.blurRadiusInPixels = sliderValue }, @@ -854,7 +854,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { BilateralBlur() }, listName: "Bilateral blur", titleName: "Bilateral Blur", - 
sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:10.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 10.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.distanceNormalizationFactor = sliderValue }, @@ -864,7 +864,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { MotionBlur() }, listName: "Motion blur", titleName: "Motion Blur", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:180.0, initialValue:0.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 180.0, initialValue: 0.0), sliderUpdateCallback: {filter, sliderValue in filter.blurAngle = sliderValue }, @@ -874,7 +874,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ZoomBlur() }, listName: "Zoom blur", titleName: "Zoom Blur", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.5, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.5, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.blurSize = sliderValue }, @@ -892,7 +892,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SwirlDistortion() }, listName: "Swirl", titleName: "Swirl", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 2.0, initialValue: 1.0), sliderUpdateCallback: {filter, sliderValue in filter.angle = sliderValue }, @@ -902,7 +902,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { BulgeDistortion() }, listName: "Bulge", titleName: "Bulge", - sliderConfiguration: .enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: -1.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in // filter.scale = sliderValue filter.center = Position(0.5, sliderValue) @@ -913,7 +913,7 @@ let filterOperations: [FilterOperationInterface] = [ 
filter: { PinchDistortion() }, listName: "Pinch", titleName: "Pinch", - sliderConfiguration: .enabled(minimumValue:-2.0, maximumValue:2.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: -2.0, maximumValue: 2.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.scale = sliderValue }, @@ -923,7 +923,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { SphereRefraction() }, listName: "Sphere refraction", titleName: "Sphere Refraction", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.15), sliderUpdateCallback: {filter, sliderValue in filter.radius = sliderValue }, @@ -947,7 +947,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { GlassSphereRefraction() }, listName: "Glass sphere", titleName: "Glass Sphere", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.15), sliderUpdateCallback: {filter, sliderValue in filter.radius = sliderValue }, @@ -1030,7 +1030,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { DissolveBlend() }, listName: "Dissolve blend", titleName: "Dissolve Blend", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.5), sliderUpdateCallback: {filter, sliderValue in filter.mix = sliderValue }, @@ -1040,7 +1040,7 @@ let filterOperations: [FilterOperationInterface] = [ filter: { ChromaKeyBlend() }, listName: "Chroma key blend (green)", titleName: "Chroma Key (Green)", - sliderConfiguration: .enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.4), + sliderConfiguration: .enabled(minimumValue: 0.0, maximumValue: 1.0, initialValue: 0.4), sliderUpdateCallback: {filter, sliderValue in 
filter.thresholdSensitivity = sliderValue }, diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift index 991911da..9d3bad95 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift @@ -59,7 +59,7 @@ class FilterShowcaseWindowController: NSWindowController { self.blendImage.addTarget((currentFilterOperation!.filter)) currentFilterOperation!.filter.addTarget(filterView!) self.blendImage.processImage() - case let .custom(filterSetupFunction:setupFunction): + case let .custom(filterSetupFunction: setupFunction): currentFilterOperation!.configureCustomFilter(setupFunction(videoCamera!, currentFilterOperation!.filter, filterView!)) } diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift index 2798111a..a3e8dff7 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -47,7 +47,7 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega self.blendImage?.addTarget(currentFilterConfiguration.filter) self.blendImage?.processImage() currentFilterConfiguration.filter.addTarget(view) - case let .custom(filterSetupFunction:setupFunction): + case let .custom(filterSetupFunction: setupFunction): currentFilterConfiguration.configureCustomFilter(setupFunction(videoCamera, currentFilterConfiguration.filter, view)) } diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift index 64c90327..cb6de04e 100755 --- 
a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift @@ -91,9 +91,9 @@ extension ViewController: CameraDelegate { let bl = Position(x, y + height) let br = Position(x + width, y + height) - return [.segment(p1:tl, p2:tr), // top - .segment(p1:tr, p2:br), // right - .segment(p1:br, p2:bl), // bottom - .segment(p1:bl, p2:tl)] // left + return [.segment(p1: tl, p2: tr), // top + .segment(p1: tr, p2: br), // right + .segment(p1: br, p2: bl), // bottom + .segment(p1: bl, p2: tl)] // left } } diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index b5aab8d8..44974d7b 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -162,7 +162,7 @@ open class BasicOperation: ImageProcessingOperation { if let timestamp = framebuffer.timingStyle.timestamp { if !(timestamp < (latestTimestamp ?? timestamp)) { latestTimestamp = timestamp - renderFramebuffer.timingStyle = .videoFrame(timestamp:timestamp) + renderFramebuffer.timingStyle = .videoFrame(timestamp: timestamp) } framebuffer.unlock() diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 39517e03..347d34b3 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -156,7 +156,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) + cameraFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) if self.runBenchmark { diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index b8cf8043..964a0a98 100644 --- a/framework/Source/Mac/MovieInput.swift +++ 
b/framework/Source/Mac/MovieInput.swift @@ -173,7 +173,7 @@ public class MovieInput: ImageSource { convertYUVToRGB(shader: self.yuvConversionShader, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) + movieFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime)) self.updateTargetsWithFramebuffer(movieFramebuffer) if self.runBenchmark { diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index d925ac60..25145ac2 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -411,7 +411,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer """ #endif - cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) + cameraFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) // Clean up after all done diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 086ebe02..d240b2e9 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -195,8 +195,8 @@ public class PictureInput: ImageSource { var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var cgImage: CGImage = image.cgImage! 
try autoreleasepool { - let options: [CIImageOption : Any] = [.applyOrientationProperty : true, - .properties : [kCGImagePropertyOrientation : image.imageOrientation.cgImageOrientation.rawValue]] + let options: [CIImageOption: Any] = [.applyOrientationProperty: true, + .properties: [kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue]] var newImage = CIImage(cgImage: cgImage, options: options) // scale to image size let ratioW = imageSize.width / image.size.width From fc9210d8e3bd4f75bef55f7ece6061a889506028 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Fri, 7 May 2021 12:19:26 +0800 Subject: [PATCH 320/332] improve(PictureInput): better API for CIImage processes --- framework/Source/iOS/PictureInput.swift | 158 +++++++++++++++--------- 1 file changed, 102 insertions(+), 56 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index d240b2e9..0e2c6bf8 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -1,17 +1,27 @@ import OpenGLES import UIKit -public enum PictureInputTransformStep { - case scale(x: CGFloat, y: CGFloat) - case translation(tx: CGFloat, ty: CGFloat) - case rotation(angle: CGFloat) - - var isTranslation: Bool { - switch self { - case .translation: return true - default: return false - } +/// Operation on input image, which will be translated into CIImage opereation +public enum PictureInputProcessStep { + public enum AnchorPoint { + // Default anchor point for CIImage + case originPoint + // CIImage.extent.center as anchor point + case extentCenter + // Custom anchor point + case custom(point: CGPoint) } + /// Scale + case scale(x: CGFloat, y: CGFloat, anchorPoint: AnchorPoint) + /// Crop to rect. Rect values are from [0, 1] and its base is the lates extend rect of the image after previous steps. + /// **isViewCoordinate** is true indicates zero point is Left-Top corner, false indicates zero point is Left-Bottom corner. 
+ case crop(rect: CGRect, isViewCoordinate: Bool) + /// Rotate image by angle (unit: radian) + case rotation(angle: CGFloat, anchorPoint: AnchorPoint) + /// Remember the original extent rect, rotate image by angle (unit: radian), scale by ratio, then crop to original extent rect + case rotateScaleAndKeepRect(angle: CGFloat, scale: CGFloat, anchorPoint: AnchorPoint) + /// Resize apsect ratio + case resizeAspectRatio(size: CGSize, isFill: Bool) } public enum PictureInputError: Error, CustomStringConvertible { @@ -202,7 +212,7 @@ public class PictureInput: ImageSource { let ratioW = imageSize.width / image.size.width let ratioH = imageSize.height / image.size.height let fillRatio = max(ratioW, ratioH) - newImage = newImage.scaled(by: fillRatio, roundRect: true) + newImage = newImage.scaled(fillRatio, yScale: fillRatio, roundRect: true) let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) // crop image to target display frame newImage = newImage.cropped(to: displayFrame) @@ -232,13 +242,13 @@ public class PictureInput: ImageSource { try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation, preprocessRenderInfo: preprocessRenderInfo) } - public convenience init(image: UIImage, size: CGSize?, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, transforms: [[PictureInputTransformStep]]? = nil) throws { + public convenience init(image: UIImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, processSteps: [PictureInputProcessStep]? = nil) throws { #if DEBUG let startTime = CACurrentMediaTime() #endif var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation var croppedCGImage: CGImage? 
- if let targetSize = size { + if let processSteps = processSteps, !processSteps.isEmpty { try autoreleasepool { // Get CIImage with orientation let ciImage: CIImage? @@ -256,50 +266,49 @@ public class PictureInput: ImageSource { throw PictureInputError.createImageError } - // Scale - let ratioW = targetSize.width / image.size.width - let ratioH = targetSize.height / image.size.height - let fillRatio = max(ratioW, ratioH) - newImage = newImage.scaled(by: fillRatio, roundRect: true) - - var scaleX: CGFloat = 1 - var scaleY: CGFloat = 1 - var translationX: CGFloat = 0 - var translationY: CGFloat = 0 - if let stepGroups = transforms, !stepGroups.isEmpty { - var extraTransform = CGAffineTransform.identity - for group in stepGroups { - var groupTransform = CGAffineTransform.identity - for step in group { - switch step { - case let .scale(x, y): - if group.contains(where: { $0.isTranslation }) { - scaleX *= x - scaleY *= y - } - groupTransform = groupTransform.concatenating(.init(scaleX: x, y: y)) - case let .translation(tx, ty): - translationX += (tx * scaleX * targetSize.width) - translationY += (ty * scaleY * targetSize.height) - groupTransform = groupTransform.concatenating(.init(translationX: translationX, y: translationY)) - case let .rotation(angle): - groupTransform = groupTransform.concatenating(.init(rotationAngle: angle)) - } + for step in processSteps { + switch step { + case let .scale(x, y, anchorPoint): + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + $0.transformed(by: .init(scaleX: x, y: y)) + } + case let .crop(rect, isViewCoordinate): + // rasterized: [0, 1] -> [0, width/height] + let adjustedY: CGFloat = isViewCoordinate ? 
(1.0 - rect.maxY) : rect.origin.y + let rasterizedRect = CGRect(x: rect.origin.x * newImage.extent.size.width + newImage.extent.origin.x, + y: adjustedY * newImage.extent.size.height + newImage.extent.origin.y, + width: rect.size.width * newImage.extent.size.width, + height: rect.size.height * newImage.extent.size.height) + newImage = newImage.cropped(to: rasterizedRect) + case let .rotation(angle, anchorPoint): + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + $0.transformed(by: .init(rotationAngle: angle)) } - extraTransform = extraTransform.concatenating(groupTransform) + case let .rotateScaleAndKeepRect(angle, scale, anchorPoint): + let originExtent = newImage.extent + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + $0.transformed(by: .init(rotationAngle: angle)) + .transformed(by: .init(scaleX: scale, y: scale)) + } + newImage = newImage.cropped(to: originExtent) + case let .resizeAspectRatio(size, isFill): + let croppedUnscaleFrame: CGRect + if isFill { + croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.extent) + } else { + croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).aspectToFill(insideRect: newImage.extent) + } + let roundedCroppedUnscaleFrame = CGRect(x: croppedUnscaleFrame.origin.x.rounded(.towardZero), + y: croppedUnscaleFrame.origin.y.rounded(.towardZero), + width: croppedUnscaleFrame.width.rounded(.towardZero), + height: croppedUnscaleFrame.height.rounded(.towardZero)) + newImage = newImage.cropped(to: roundedCroppedUnscaleFrame) + let scaleRatio = size.width / roundedCroppedUnscaleFrame.width + newImage = newImage.scaled(scaleRatio, yScale: scaleRatio, roundRect: true) } - newImage = newImage.transformed(by: extraTransform) } - // Crop and generate image - let cropRect = CGRect( - x: newImage.extent.origin.x - translationX + (newImage.extent.size.width - targetSize.width) / 2, - y: newImage.extent.origin.y + translationY + 
(newImage.extent.size.height - targetSize.height) / 2, - width: targetSize.width, - height: targetSize.height - ) - - guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: cropRect) else { + guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: newImage.extent) else { throw PictureInputError.createImageError } croppedCGImage = newCgImage @@ -325,7 +334,7 @@ public class PictureInput: ImageSource { { PictureInput_pre_process : { input: { - size: \(image.size.debugRenderInfo), type: UIImage, size:\(size?.debugRenderInfo ?? ""), transforms: \(String(describing: transforms)) + size: \(image.size.debugRenderInfo), type: UIImage, processSteps: \(String(describing: processSteps)) }, output: { size: \(cgImage.width)x\(cgImage.height), type: CGImage }, time: \((CACurrentMediaTime() - startTime) * 1000.0)ms @@ -400,9 +409,46 @@ public extension CGSize { #endif } +extension CGRect { + fileprivate func fitRect(inside rect: CGRect) -> CGRect { + let scale = min(rect.width / width, rect.height / height) + let scaledSize = size.applying(CGAffineTransform(scaleX: scale, y: scale)) + let fitX = (rect.width - scaledSize.width) / 2 + rect.origin.x + let fitY = (rect.height - scaledSize.height) / 2 + rect.origin.y + return CGRect(origin: CGPoint(x: fitX, y: fitY), size: scaledSize) + } + + fileprivate func aspectToFill(insideRect boundingRect: CGRect) -> CGRect { + let widthScale = boundingRect.width / width + let heightScale = boundingRect.height / height + let scale = max(widthScale, heightScale) + var newRect = applying(CGAffineTransform(scaleX: scale, y: scale)) + newRect.origin = CGPoint(x: boundingRect.midX - newRect.size.width / 2, y: boundingRect.midY - newRect.size.height / 2) + return newRect + } +} + private extension CIImage { - func scaled(by scaleRatio: CGFloat, roundRect: Bool) -> CIImage { - let scaleTransform = CGAffineTransform(scaleX: scaleRatio, y: scaleRatio) + func processedWithAnchorPoint(_ anchorPoint: 
PictureInputProcessStep.AnchorPoint, processes: (CIImage) -> CIImage) -> CIImage { + switch anchorPoint { + case .originPoint: + // Do nothing since it is how CIImage works + return self + case .extentCenter: + let center = CGPoint(x: extent.midX, y: extent.midY) + let anchoredImage = transformed(by: CGAffineTransform(translationX: -center.x, y: -center.y)) + let processedImage = processes(anchoredImage) + let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: center.x, y: center.y)) + return anchoreResetImage + case let .custom(point): + let anchoredImage = transformed(by: CGAffineTransform(translationX: -point.x, y: -point.y)) + let processedImage = processes(anchoredImage) + let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: point.x, y: point.y)) + return anchoreResetImage + } + } + func scaled(_ xScale: CGFloat, yScale: CGFloat, roundRect: Bool) -> CIImage { + let scaleTransform = CGAffineTransform(scaleX: xScale, y: yScale) // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect let transformedRect = extent.applying(scaleTransform) let scaledImage = transformed(by: scaleTransform) From 1ab944609b0c803e11a2cc50dbbcb80ef7ef51b4 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 10 May 2021 16:23:28 +0800 Subject: [PATCH 321/332] improve(MoviePlayer): support using CIImage to process video frame --- framework/Source/ImageOrientation.swift | 9 ++ .../Source/iOS/FramebufferGenerator.swift | 60 +++++++++ framework/Source/iOS/MoviePlayer.swift | 10 +- framework/Source/iOS/PictureInput.swift | 107 +--------------- framework/Source/iOS/PictureProcessor.swift | 117 ++++++++++++++++++ 5 files changed, 199 insertions(+), 104 deletions(-) create mode 100644 framework/Source/iOS/PictureProcessor.swift diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index 
060e865c..0943d490 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -21,6 +21,15 @@ public enum ImageOrientation { case (.landscapeRight, .portraitUpsideDown): return .rotateClockwise } } + + var cgImageOrientation: CGImagePropertyOrientation { + switch self { + case .portrait: return .up + case .portraitUpsideDown: return .down + case .landscapeLeft: return .left + case .landscapeRight: return .right + } + } } public enum Rotation { diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift index ee74a180..a7e1e93c 100644 --- a/framework/Source/iOS/FramebufferGenerator.swift +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -31,6 +31,14 @@ public class FramebufferGenerator { } return pixelBuffer } + + public func processAndGenerateFromBuffer(_ pixelBuffer: CVPixelBuffer, frameTime: CMTime, processSteps: [PictureInputProcessStep], videoOrientation: ImageOrientation) -> Framebuffer? { + var framebuffer: Framebuffer? + sharedImageProcessingContext.runOperationSynchronously { + framebuffer = _processAndGenerateFromBuffer(pixelBuffer, frameTime: frameTime, processSteps: processSteps, videoOrientation: videoOrientation) + } + return framebuffer + } } private extension FramebufferGenerator { @@ -46,6 +54,7 @@ private extension FramebufferGenerator { } func _generateFromYUVBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, videoOrientation: ImageOrientation) -> Framebuffer? { +// let startTime = CACurrentMediaTime() guard let yuvConversionShader = yuvConversionShader else { debugPrint("ERROR! yuvConversionShader hasn't been setup before starting") return nil @@ -129,6 +138,9 @@ private extension FramebufferGenerator { resultFramebuffer: framebuffer, colorConversionMatrix: conversionMatrix) framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) + +// debugPrint("Generated framebuffer from CVPixelBuffer. 
time: \(CACurrentMediaTime() - startTime)") + return framebuffer } @@ -173,6 +185,52 @@ private extension FramebufferGenerator { return pixelBuffer } + func _processAndGenerateFromBuffer(_ yuvPixelBuffer: CVPixelBuffer, frameTime: CMTime, processSteps: [PictureInputProcessStep], videoOrientation: ImageOrientation) -> Framebuffer? { +// let startTime = CACurrentMediaTime() + CVPixelBufferLockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + defer { + CVPixelBufferUnlockBaseAddress(yuvPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) + CVOpenGLESTextureCacheFlush(sharedImageProcessingContext.coreVideoTextureCache, 0) + } + + let ciImage = CIImage(cvPixelBuffer: yuvPixelBuffer, + options: [.applyOrientationProperty: true, + .properties: [ kCGImagePropertyOrientation: videoOrientation.cgImageOrientation.rawValue ]]) + var processStepsWithCoordinateCorrection = processSteps + // NOTE: CIImage coordinate is mirrored compared with OpenGLES when calling draw(_:in:size:from:), so it needs to be mirrored before render to OpenGL + processStepsWithCoordinateCorrection.append(.scale(x: 1, y: -1, anchorPoint: .extentCenter)) + let processedImage = ciImage.processed(with: processStepsWithCoordinateCorrection) + +// debugPrint("Process CIImage. 
time: \(CACurrentMediaTime() - startTime)") + + let bufferHeight = Int32(processedImage.extent.height) + let bufferWidth = Int32(processedImage.extent.width) + + let portraitSize: GLSize + switch videoOrientation.rotationNeededForOrientation(.portrait) { + case .noRotation, .rotate180, .flipHorizontally, .flipVertically: + portraitSize = GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)) + case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: + portraitSize = GLSize(width: GLint(bufferHeight), height: GLint(bufferWidth)) + } + + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: portraitSize, textureOnly: false) + framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) + + // Bind texture + framebuffer.activateFramebufferForRendering() + glBindTexture(GLenum(GL_TEXTURE_2D), framebuffer.texture) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) + glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) + + // TODO: + CIImage.glBackedContext.draw(processedImage, in: CGRect(origin: .zero, size: processedImage.extent.size), from: processedImage.extent) + +// debugPrint("Reneder CIImage to OpenGL texture. time: \(CACurrentMediaTime() - startTime)") + + return framebuffer + } + func _createPixelBufferPool(_ width: Int32, _ height: Int32, _ pixelFormat: FourCharCode, _ maxBufferCount: Int32) -> CVPixelBufferPool? { var outputPool: CVPixelBufferPool? 
@@ -180,6 +238,8 @@ private extension FramebufferGenerator { kCVPixelBufferWidthKey: width, kCVPixelBufferHeightKey: height, kCVPixelFormatOpenGLESCompatibility: true, + kCVPixelBufferIOSurfaceCoreAnimationCompatibilityKey: true, + kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: true, kCVPixelBufferIOSurfacePropertiesKey: NSDictionary()] let pixelBufferPoolOptions: NSDictionary = [kCVPixelBufferPoolMinimumBufferCountKey: maxBufferCount] diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index 123bce55..f2933cc5 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -42,6 +42,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { public private(set) var isPlaying = false public var lastPlayerItem: AVPlayerItem? public var playableItem: AVPlayerItem? { currentItem ?? lastPlayerItem } + public var processSteps: [PictureInputProcessStep]? var displayLink: CADisplayLink? @@ -621,7 +622,14 @@ private extension MoviePlayer { } } - guard hasTarget, let framebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: timeForDisplay, videoOrientation: videoOrientation) else { return } + guard hasTarget else { return } + let newFramebuffer: Framebuffer? 
+ if let processSteps = processSteps, !processSteps.isEmpty { + newFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(pixelBuffer, frameTime: timeForDisplay, processSteps: processSteps, videoOrientation: videoOrientation) + } else { + newFramebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: timeForDisplay, videoOrientation: videoOrientation) + } + guard let framebuffer = newFramebuffer else { return } framebuffer.userInfo = framebufferUserInfo #if DEBUG diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 0e2c6bf8..c08f768d 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -21,7 +21,7 @@ public enum PictureInputProcessStep { /// Remember the original extent rect, rotate image by angle (unit: radian), scale by ratio, then crop to original extent rect case rotateScaleAndKeepRect(angle: CGFloat, scale: CGFloat, anchorPoint: AnchorPoint) /// Resize apsect ratio - case resizeAspectRatio(size: CGSize, isFill: Bool) + case resizeAspectRatio(size: CGSize, isFill: Bool, allowUpScale: Bool) } public enum PictureInputError: Error, CustomStringConvertible { @@ -54,7 +54,6 @@ public class PictureInput: ImageSource { public var framebufferUserInfo: [AnyHashable: Any]? 
public let imageName: String var hasProcessedImage: Bool = false - private static var ciContext = CIContext(options: nil) #if DEBUG public var printDebugRenderInfos = true public var debugRenderInfo: String = "" @@ -216,7 +215,7 @@ public class PictureInput: ImageSource { let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) // crop image to target display frame newImage = newImage.cropped(to: displayFrame) - guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: newImage.extent) else { + guard let newCgImage = newImage.renderToCGImage() else { throw PictureInputError.createImageError } cgImage = newCgImage @@ -262,53 +261,7 @@ public class PictureInput: ImageSource { ] ]) } - guard var newImage = ciImage else { - throw PictureInputError.createImageError - } - - for step in processSteps { - switch step { - case let .scale(x, y, anchorPoint): - newImage = newImage.processedWithAnchorPoint(anchorPoint) { - $0.transformed(by: .init(scaleX: x, y: y)) - } - case let .crop(rect, isViewCoordinate): - // rasterized: [0, 1] -> [0, width/height] - let adjustedY: CGFloat = isViewCoordinate ? 
(1.0 - rect.maxY) : rect.origin.y - let rasterizedRect = CGRect(x: rect.origin.x * newImage.extent.size.width + newImage.extent.origin.x, - y: adjustedY * newImage.extent.size.height + newImage.extent.origin.y, - width: rect.size.width * newImage.extent.size.width, - height: rect.size.height * newImage.extent.size.height) - newImage = newImage.cropped(to: rasterizedRect) - case let .rotation(angle, anchorPoint): - newImage = newImage.processedWithAnchorPoint(anchorPoint) { - $0.transformed(by: .init(rotationAngle: angle)) - } - case let .rotateScaleAndKeepRect(angle, scale, anchorPoint): - let originExtent = newImage.extent - newImage = newImage.processedWithAnchorPoint(anchorPoint) { - $0.transformed(by: .init(rotationAngle: angle)) - .transformed(by: .init(scaleX: scale, y: scale)) - } - newImage = newImage.cropped(to: originExtent) - case let .resizeAspectRatio(size, isFill): - let croppedUnscaleFrame: CGRect - if isFill { - croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.extent) - } else { - croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).aspectToFill(insideRect: newImage.extent) - } - let roundedCroppedUnscaleFrame = CGRect(x: croppedUnscaleFrame.origin.x.rounded(.towardZero), - y: croppedUnscaleFrame.origin.y.rounded(.towardZero), - width: croppedUnscaleFrame.width.rounded(.towardZero), - height: croppedUnscaleFrame.height.rounded(.towardZero)) - newImage = newImage.cropped(to: roundedCroppedUnscaleFrame) - let scaleRatio = size.width / roundedCroppedUnscaleFrame.width - newImage = newImage.scaled(scaleRatio, yScale: scaleRatio, roundRect: true) - } - } - - guard let newCgImage = PictureInput.ciContext.createCGImage(newImage, from: newImage.extent) else { + guard let newCgImage = ciImage?.processed(with: processSteps).renderToCGImage() else { throw PictureInputError.createImageError } croppedCGImage = newCgImage @@ -318,7 +271,7 @@ public class PictureInput: ImageSource 
{ let ciImage = CIImage(image: image, options: [.applyOrientationProperty: true, .properties: [ kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue ]]), - let rotatedImage = PictureInput.ciContext.createCGImage(ciImage, from: ciImage.extent) { + let rotatedImage = ciImage.renderToCGImage() { // Rotated correct orientation croppedCGImage = rotatedImage } else { @@ -408,55 +361,3 @@ public extension CGSize { var debugRenderInfo: String { "\(width)x\(height)" } #endif } - -extension CGRect { - fileprivate func fitRect(inside rect: CGRect) -> CGRect { - let scale = min(rect.width / width, rect.height / height) - let scaledSize = size.applying(CGAffineTransform(scaleX: scale, y: scale)) - let fitX = (rect.width - scaledSize.width) / 2 + rect.origin.x - let fitY = (rect.height - scaledSize.height) / 2 + rect.origin.y - return CGRect(origin: CGPoint(x: fitX, y: fitY), size: scaledSize) - } - - fileprivate func aspectToFill(insideRect boundingRect: CGRect) -> CGRect { - let widthScale = boundingRect.width / width - let heightScale = boundingRect.height / height - let scale = max(widthScale, heightScale) - var newRect = applying(CGAffineTransform(scaleX: scale, y: scale)) - newRect.origin = CGPoint(x: boundingRect.midX - newRect.size.width / 2, y: boundingRect.midY - newRect.size.height / 2) - return newRect - } -} - -private extension CIImage { - func processedWithAnchorPoint(_ anchorPoint: PictureInputProcessStep.AnchorPoint, processes: (CIImage) -> CIImage) -> CIImage { - switch anchorPoint { - case .originPoint: - // Do nothing since it is how CIImage works - return self - case .extentCenter: - let center = CGPoint(x: extent.midX, y: extent.midY) - let anchoredImage = transformed(by: CGAffineTransform(translationX: -center.x, y: -center.y)) - let processedImage = processes(anchoredImage) - let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: center.x, y: center.y)) - return anchoreResetImage - case let 
.custom(point): - let anchoredImage = transformed(by: CGAffineTransform(translationX: -point.x, y: -point.y)) - let processedImage = processes(anchoredImage) - let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: point.x, y: point.y)) - return anchoreResetImage - } - } - func scaled(_ xScale: CGFloat, yScale: CGFloat, roundRect: Bool) -> CIImage { - let scaleTransform = CGAffineTransform(scaleX: xScale, y: yScale) - // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect - let transformedRect = extent.applying(scaleTransform) - let scaledImage = transformed(by: scaleTransform) - if roundRect { - let originRoundedImage = scaledImage.transformed(by: CGAffineTransform(translationX: transformedRect.origin.x.rounded(.towardZero) - transformedRect.origin.x, y: transformedRect.origin.y.rounded(.towardZero) - transformedRect.origin.y)) - return originRoundedImage - } else { - return scaledImage - } - } -} diff --git a/framework/Source/iOS/PictureProcessor.swift b/framework/Source/iOS/PictureProcessor.swift new file mode 100644 index 00000000..3c3b0e6d --- /dev/null +++ b/framework/Source/iOS/PictureProcessor.swift @@ -0,0 +1,117 @@ +// +// PictureProcessor.swift +// GPUImage2 +// +// Created by 陈品霖 on 2021/5/8. +// + +import Foundation + +extension CIImage { + /// Shared CIContext to improve performance + static var glBackedContext = CIContext(eaglContext: sharedImageProcessingContext.context) + + func processed(with processSteps: [PictureInputProcessStep]?) 
-> CIImage { + guard let processSteps = processSteps, !processSteps.isEmpty else { return self } + var newImage = self + for step in processSteps { + switch step { + case let .scale(x, y, anchorPoint): + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + $0.transformed(by: .init(scaleX: x, y: y)) + } + case let .crop(rect, isViewCoordinate): + // rasterized: [0, 1] -> [0, width/height] + let adjustedY: CGFloat = isViewCoordinate ? (1.0 - rect.maxY) : rect.origin.y + let rasterizedRect = CGRect(x: rect.origin.x * newImage.extent.size.width + newImage.extent.origin.x, + y: adjustedY * newImage.extent.size.height + newImage.extent.origin.y, + width: rect.size.width * newImage.extent.size.width, + height: rect.size.height * newImage.extent.size.height) + newImage = newImage.cropped(to: rasterizedRect) + case let .rotation(angle, anchorPoint): + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + $0.transformed(by: .init(rotationAngle: angle)) + } + case let .rotateScaleAndKeepRect(angle, scale, anchorPoint): + let originExtent = newImage.extent + newImage = newImage.processedWithAnchorPoint(anchorPoint) { + $0.transformed(by: .init(rotationAngle: angle)) + .transformed(by: .init(scaleX: scale, y: scale)) + } + newImage = newImage.cropped(to: originExtent) + case let .resizeAspectRatio(size, isFill, allowUpScale): + let croppedUnscaleFrame: CGRect + if isFill { + croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.extent) + } else { + croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).aspectToFill(insideRect: newImage.extent) + } + let roundedCroppedUnscaleFrame = CGRect(x: croppedUnscaleFrame.origin.x.rounded(.towardZero), + y: croppedUnscaleFrame.origin.y.rounded(.towardZero), + width: croppedUnscaleFrame.width.rounded(.towardZero), + height: croppedUnscaleFrame.height.rounded(.towardZero)) + newImage = newImage.cropped(to: roundedCroppedUnscaleFrame) + let 
scaleRatio = size.width / roundedCroppedUnscaleFrame.width + if scaleRatio < 1 || allowUpScale { + newImage = newImage.scaled(scaleRatio, yScale: scaleRatio, roundRect: true) + } + } + } + return newImage + } + + func processedWithAnchorPoint(_ anchorPoint: PictureInputProcessStep.AnchorPoint, processes: (CIImage) -> CIImage) -> CIImage { + switch anchorPoint { + case .originPoint: + // Do nothing since it is how CIImage works + return self + case .extentCenter: + let center = CGPoint(x: extent.midX, y: extent.midY) + let anchoredImage = transformed(by: CGAffineTransform(translationX: -center.x, y: -center.y)) + let processedImage = processes(anchoredImage) + let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: center.x, y: center.y)) + return anchoreResetImage + case let .custom(point): + let anchoredImage = transformed(by: CGAffineTransform(translationX: -point.x, y: -point.y)) + let processedImage = processes(anchoredImage) + let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: point.x, y: point.y)) + return anchoreResetImage + } + } + + func scaled(_ xScale: CGFloat, yScale: CGFloat, roundRect: Bool) -> CIImage { + let scaleTransform = CGAffineTransform(scaleX: xScale, y: yScale) + // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect + let transformedRect = extent.applying(scaleTransform) + let scaledImage = transformed(by: scaleTransform) + if roundRect { + let originRoundedImage = scaledImage.transformed(by: CGAffineTransform(translationX: transformedRect.origin.x.rounded(.towardZero) - transformedRect.origin.x, y: transformedRect.origin.y.rounded(.towardZero) - transformedRect.origin.y)) + return originRoundedImage + } else { + return scaledImage + } + } + + func renderToCGImage() -> CGImage? 
{ + return Self.glBackedContext.createCGImage(self, from: extent) + } +} + +fileprivate extension CGRect { + func fitRect(inside rect: CGRect) -> CGRect { + let scale = min(rect.width / width, rect.height / height) + let scaledSize = size.applying(CGAffineTransform(scaleX: scale, y: scale)) + let fitX = (rect.width - scaledSize.width) / 2 + rect.origin.x + let fitY = (rect.height - scaledSize.height) / 2 + rect.origin.y + return CGRect(origin: CGPoint(x: fitX, y: fitY), size: scaledSize) + } + + func aspectToFill(insideRect boundingRect: CGRect) -> CGRect { + let widthScale = boundingRect.width / width + let heightScale = boundingRect.height / height + let scale = max(widthScale, heightScale) + var newRect = applying(CGAffineTransform(scaleX: scale, y: scale)) + newRect.origin = CGPoint(x: boundingRect.midX - newRect.size.width / 2, y: boundingRect.midY - newRect.size.height / 2) + return newRect + } +} From a414bcae43736187f270930abbeb5291a1f22c28 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 10 May 2021 22:21:42 +0800 Subject: [PATCH 322/332] chore: add a new API for SerialDispatch with sync as parameter --- framework/Source/SerialDispatch.swift | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index c408fbd0..f6819264 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -79,6 +79,14 @@ public extension SerialDispatch { } } + func runOperation(sync: Bool, _ operation:@escaping () -> Void) { + if sync { + runOperationSynchronously(operation) + } else { + runOperationAsynchronously(operation) + } + } + func runOperationAsynchronously(_ operation:@escaping () -> Void) { self.serialDispatchQueue.async { self.executeStartTime = CACurrentMediaTime() From 200907a970cb2f93c0f62cec41cdfecca9bc0076 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Mon, 10 May 2021 23:58:30 +0800 Subject: [PATCH 323/332] improve(MoviePlayer): improve CIImage extend 
rect precision --- .../Source/iOS/FramebufferGenerator.swift | 7 +- framework/Source/iOS/PictureInput.swift | 4 +- framework/Source/iOS/PictureProcessor.swift | 110 ++++++++++++------ 3 files changed, 80 insertions(+), 41 deletions(-) diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift index a7e1e93c..4c145271 100644 --- a/framework/Source/iOS/FramebufferGenerator.swift +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -219,13 +219,14 @@ private extension FramebufferGenerator { // Bind texture framebuffer.activateFramebufferForRendering() + clearFramebufferWithColor(Color.black) glBindTexture(GLenum(GL_TEXTURE_2D), framebuffer.texture) glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE)) glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) - // TODO: - CIImage.glBackedContext.draw(processedImage, in: CGRect(origin: .zero, size: processedImage.extent.size), from: processedImage.extent) - + // TODO: this API performance is slower than Crop filter, improve this later + CIImage.glBackedContext.draw(processedImage, in: CGRect(origin: .zero, size: processedImage.accurateExtent.rounded(.towardZero).size), from: processedImage.accurateExtent.rounded(.towardZero)) + // debugPrint("Reneder CIImage to OpenGL texture. 
time: \(CACurrentMediaTime() - startTime)") return framebuffer diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index c08f768d..5e9229fa 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -211,10 +211,10 @@ public class PictureInput: ImageSource { let ratioW = imageSize.width / image.size.width let ratioH = imageSize.height / image.size.height let fillRatio = max(ratioW, ratioH) - newImage = newImage.scaled(fillRatio, yScale: fillRatio, roundRect: true) + newImage = newImage.accurateTransformed(by: .init(scaleX: fillRatio, y: fillRatio)) let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) // crop image to target display frame - newImage = newImage.cropped(to: displayFrame) + newImage = newImage.accurateCropped(to: displayFrame) guard let newCgImage = newImage.renderToCGImage() else { throw PictureInputError.createImageError } diff --git a/framework/Source/iOS/PictureProcessor.swift b/framework/Source/iOS/PictureProcessor.swift index 3c3b0e6d..9a566eb0 100644 --- a/framework/Source/iOS/PictureProcessor.swift +++ b/framework/Source/iOS/PictureProcessor.swift @@ -18,42 +18,40 @@ extension CIImage { switch step { case let .scale(x, y, anchorPoint): newImage = newImage.processedWithAnchorPoint(anchorPoint) { - $0.transformed(by: .init(scaleX: x, y: y)) + let transform = CGAffineTransform(scaleX: x, y: y) + return $0.accurateTransformed(by: transform) } case let .crop(rect, isViewCoordinate): // rasterized: [0, 1] -> [0, width/height] let adjustedY: CGFloat = isViewCoordinate ? 
(1.0 - rect.maxY) : rect.origin.y - let rasterizedRect = CGRect(x: rect.origin.x * newImage.extent.size.width + newImage.extent.origin.x, - y: adjustedY * newImage.extent.size.height + newImage.extent.origin.y, - width: rect.size.width * newImage.extent.size.width, - height: rect.size.height * newImage.extent.size.height) - newImage = newImage.cropped(to: rasterizedRect) + let rasterizedRect = CGRect(x: rect.origin.x * newImage.accurateExtent.size.width + newImage.accurateExtent.origin.x, + y: adjustedY * newImage.accurateExtent.size.height + newImage.accurateExtent.origin.y, + width: rect.size.width * newImage.accurateExtent.size.width, + height: rect.size.height * newImage.accurateExtent.size.height).rounded() + newImage = newImage.accurateCropped(to: rasterizedRect) case let .rotation(angle, anchorPoint): newImage = newImage.processedWithAnchorPoint(anchorPoint) { - $0.transformed(by: .init(rotationAngle: angle)) + let transform = CGAffineTransform(rotationAngle: angle) + return $0.accurateTransformed(by: transform) } case let .rotateScaleAndKeepRect(angle, scale, anchorPoint): - let originExtent = newImage.extent + let originExtent = newImage.accurateExtent newImage = newImage.processedWithAnchorPoint(anchorPoint) { - $0.transformed(by: .init(rotationAngle: angle)) - .transformed(by: .init(scaleX: scale, y: scale)) + let transform = CGAffineTransform(rotationAngle: angle).scaledBy(x: scale, y: scale) + return $0.accurateTransformed(by: transform) } - newImage = newImage.cropped(to: originExtent) + newImage = newImage.accurateCropped(to: originExtent) case let .resizeAspectRatio(size, isFill, allowUpScale): - let croppedUnscaleFrame: CGRect + let roundedCroppedUnscaleFrame: CGRect if isFill { - croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.extent) + roundedCroppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.accurateExtent).rounded() } else { - 
croppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).aspectToFill(insideRect: newImage.extent) + roundedCroppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).aspectToFill(insideRect: newImage.accurateExtent).rounded() } - let roundedCroppedUnscaleFrame = CGRect(x: croppedUnscaleFrame.origin.x.rounded(.towardZero), - y: croppedUnscaleFrame.origin.y.rounded(.towardZero), - width: croppedUnscaleFrame.width.rounded(.towardZero), - height: croppedUnscaleFrame.height.rounded(.towardZero)) - newImage = newImage.cropped(to: roundedCroppedUnscaleFrame) + newImage = newImage.accurateCropped(to: roundedCroppedUnscaleFrame) let scaleRatio = size.width / roundedCroppedUnscaleFrame.width if scaleRatio < 1 || allowUpScale { - newImage = newImage.scaled(scaleRatio, yScale: scaleRatio, roundRect: true) + newImage = newImage.accurateTransformed(by: .init(scaleX: scaleRatio, y: scaleRatio)) } } } @@ -66,38 +64,67 @@ extension CIImage { // Do nothing since it is how CIImage works return self case .extentCenter: - let center = CGPoint(x: extent.midX, y: extent.midY) - let anchoredImage = transformed(by: CGAffineTransform(translationX: -center.x, y: -center.y)) + let center = CGPoint(x: accurateExtent.midX, y: accurateExtent.midY) + let anchoredImage = accurateTransformed(by: CGAffineTransform(translationX: -center.x, y: -center.y)) let processedImage = processes(anchoredImage) - let anchoreResetImage = processedImage.transformed(by: CGAffineTransform(translationX: center.x, y: center.y)) + let anchoreResetImage = processedImage.accurateTransformed(by: CGAffineTransform(translationX: center.x, y: center.y)) return anchoreResetImage case let .custom(point): - let anchoredImage = transformed(by: CGAffineTransform(translationX: -point.x, y: -point.y)) + let anchoredImage = accurateTransformed(by: CGAffineTransform(translationX: -point.x, y: -point.y)) let processedImage = processes(anchoredImage) - let anchoreResetImage = 
processedImage.transformed(by: CGAffineTransform(translationX: point.x, y: point.y)) + let anchoreResetImage = processedImage.accurateTransformed(by: CGAffineTransform(translationX: point.x, y: point.y)) return anchoreResetImage } } - func scaled(_ xScale: CGFloat, yScale: CGFloat, roundRect: Bool) -> CIImage { - let scaleTransform = CGAffineTransform(scaleX: xScale, y: yScale) - // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect - let transformedRect = extent.applying(scaleTransform) - let scaledImage = transformed(by: scaleTransform) - if roundRect { - let originRoundedImage = scaledImage.transformed(by: CGAffineTransform(translationX: transformedRect.origin.x.rounded(.towardZero) - transformedRect.origin.x, y: transformedRect.origin.y.rounded(.towardZero) - transformedRect.origin.y)) - return originRoundedImage + func accurateTransformed(by transform: CGAffineTransform, rounded: Bool = true) -> CIImage { + let transformedRect = accurateExtent.applying(transform) + let transformedImage: CIImage + if rounded && transformedRect.rounded() != transformedRect { + let sizeRoundedTransform = transform.scaledBy(x: transformedRect.rounded().width / transformedRect.width, y: transformedRect.rounded().height / transformedRect.height) + let sizeRoundedRect = accurateExtent.applying(sizeRoundedTransform) + let positionRoundedRect = sizeRoundedRect.rounded(.towardZero) + let positionRoundedTransform = sizeRoundedTransform.translatedBy(x: positionRoundedRect.origin.x - sizeRoundedRect.origin.x, + y: positionRoundedRect.origin.y - sizeRoundedRect.origin.y) + transformedImage = transformed(by: positionRoundedTransform) + transformedImage.accurateExtent = accurateExtent.applying(positionRoundedTransform) } else { - return scaledImage + transformedImage = transformed(by: transform) + transformedImage.accurateExtent = transformedRect } + return transformedImage + } + + func 
accurateCropped(to rect: CGRect) -> CIImage { + let croppedImage = cropped(to: rect) + croppedImage.accurateExtent = croppedImage.extent + return croppedImage } func renderToCGImage() -> CGImage? { - return Self.glBackedContext.createCGImage(self, from: extent) + return Self.glBackedContext.createCGImage(self, from: accurateExtent.rounded(.towardZero)) + } + + private static var _accurateExtentKey = 0 + + // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect + var accurateExtent: CGRect { + get { (objc_getAssociatedObject(self, &Self._accurateExtentKey) as? NSValue)?.cgRectValue ?? extent } + set { objc_setAssociatedObject(self, &Self._accurateExtentKey, NSValue(cgRect: newValue), .OBJC_ASSOCIATION_RETAIN_NONATOMIC) } + } + + // Return the original rect if every number is integral, or it will thrink by 1 point in border + var trimmedExtent: CGRect { + let accurateExtent = accurateExtent + if accurateExtent.integral != accurateExtent { + return CGRect(x: accurateExtent.origin.x + 1, y: accurateExtent.origin.y + 1, width: accurateExtent.size.width - 2, height: accurateExtent.size.height - 2) + } else { + return accurateExtent + } } } -fileprivate extension CGRect { +extension CGRect { func fitRect(inside rect: CGRect) -> CGRect { let scale = min(rect.width / width, rect.height / height) let scaledSize = size.applying(CGAffineTransform(scaleX: scale, y: scale)) @@ -114,4 +141,15 @@ fileprivate extension CGRect { newRect.origin = CGPoint(x: boundingRect.midX - newRect.size.width / 2, y: boundingRect.midY - newRect.size.height / 2) return newRect } + + func rounded(_ rule: FloatingPointRoundingRule = .toNearestOrAwayFromZero) -> CGRect { + return CGRect(x: origin.x.rounded(rule), y: origin.y.rounded(rule), width: size.width.rounded(rule), height: size.height.rounded(rule)) + } +} + +fileprivate extension CGAffineTransform { + func roundedXYTransform(for rect: 
CGRect) -> CGAffineTransform { + let transformedRect = rect.applying(self) + return translatedBy(x: transformedRect.origin.x.rounded(.towardZero) - transformedRect.origin.x, y: transformedRect.origin.y.rounded(.towardZero) - transformedRect.origin.y) + } } From ef2782a871d38b7b2026e1ecfa1ff7effd6bc996 Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 11 May 2021 10:56:55 +0800 Subject: [PATCH 324/332] improve(MovieInput): support process steps on decoded frame --- framework/Source/iOS/MovieInput.swift | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index dcbc71e3..cef4984a 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -80,6 +80,7 @@ public class MovieInput: ImageSource { var movieFramebuffer: Framebuffer? public var framebufferUserInfo: [AnyHashable: Any]? + public var processSteps: [PictureInputProcessStep]? #if DEBUG public var debugRenderInfo: String = "" @@ -464,8 +465,13 @@ public class MovieInput: ImageSource { func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) { let startTime = CACurrentMediaTime() - - guard let framebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) else { + var outputFramebuffer: Framebuffer? 
+ if let processSteps = processSteps, !processSteps.isEmpty { + outputFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(movieFrame, frameTime: withSampleTime, processSteps: processSteps, videoOrientation: videoOrientation) + } else { + outputFramebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) + } + guard let framebuffer = outputFramebuffer else { print("Cannot generate framebuffer from YUVBuffer") return } From 99a7abfc81dc5fc49e4a385b732f682d461306ba Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 11 May 2021 19:50:19 +0800 Subject: [PATCH 325/332] improve: skip trival process step --- framework/Source/iOS/PictureProcessor.swift | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/framework/Source/iOS/PictureProcessor.swift b/framework/Source/iOS/PictureProcessor.swift index 9a566eb0..d0ae3e46 100644 --- a/framework/Source/iOS/PictureProcessor.swift +++ b/framework/Source/iOS/PictureProcessor.swift @@ -17,11 +17,13 @@ extension CIImage { for step in processSteps { switch step { case let .scale(x, y, anchorPoint): + guard x != 1.0 || y != 1.0 else { continue } newImage = newImage.processedWithAnchorPoint(anchorPoint) { let transform = CGAffineTransform(scaleX: x, y: y) return $0.accurateTransformed(by: transform) } case let .crop(rect, isViewCoordinate): + guard rect.origin != .zero || rect.size != CGSize(width: 1.0, height: 1.0) else { continue } // rasterized: [0, 1] -> [0, width/height] let adjustedY: CGFloat = isViewCoordinate ? 
(1.0 - rect.maxY) : rect.origin.y let rasterizedRect = CGRect(x: rect.origin.x * newImage.accurateExtent.size.width + newImage.accurateExtent.origin.x, @@ -30,11 +32,13 @@ extension CIImage { height: rect.size.height * newImage.accurateExtent.size.height).rounded() newImage = newImage.accurateCropped(to: rasterizedRect) case let .rotation(angle, anchorPoint): + guard angle != 0 else { continue } newImage = newImage.processedWithAnchorPoint(anchorPoint) { let transform = CGAffineTransform(rotationAngle: angle) return $0.accurateTransformed(by: transform) } case let .rotateScaleAndKeepRect(angle, scale, anchorPoint): + guard angle != 0 || scale != 0 else { continue } let originExtent = newImage.accurateExtent newImage = newImage.processedWithAnchorPoint(anchorPoint) { let transform = CGAffineTransform(rotationAngle: angle).scaledBy(x: scale, y: scale) @@ -42,6 +46,7 @@ extension CIImage { } newImage = newImage.accurateCropped(to: originExtent) case let .resizeAspectRatio(size, isFill, allowUpScale): + guard size != .zero else { continue } let roundedCroppedUnscaleFrame: CGRect if isFill { roundedCroppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.accurateExtent).rounded() From c8ff521af0a23c411882e9d13142c5baec76302c Mon Sep 17 00:00:00 2001 From: Pinlin Date: Tue, 11 May 2021 19:51:15 +0800 Subject: [PATCH 326/332] fix(MoviePlayer): orientation and size --- framework/Source/iOS/FramebufferGenerator.swift | 10 +--------- framework/Source/iOS/MoviePlayer.swift | 2 +- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift index 4c145271..79725eaf 100644 --- a/framework/Source/iOS/FramebufferGenerator.swift +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -206,15 +206,7 @@ private extension FramebufferGenerator { let bufferHeight = Int32(processedImage.extent.height) let bufferWidth = 
Int32(processedImage.extent.width) - let portraitSize: GLSize - switch videoOrientation.rotationNeededForOrientation(.portrait) { - case .noRotation, .rotate180, .flipHorizontally, .flipVertically: - portraitSize = GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)) - case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: - portraitSize = GLSize(width: GLint(bufferHeight), height: GLint(bufferWidth)) - } - - let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: portraitSize, textureOnly: false) + let framebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: false) framebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime)) // Bind texture diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index f2933cc5..d7b569bb 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -68,7 +68,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { } public var videoOrientation: ImageOrientation { guard let asset = asset else { return .portrait } - return asset.imageOrientation ?? .portrait + return asset.originalOrientation ?? 
.portrait } // NOTE: be careful, this property might block your thread since it needs to access currentTime public var didPlayToEnd: Bool { From 0a8f5cfebb93fda4b1b21782e211c70c8636e38e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 12 May 2021 14:00:32 +0800 Subject: [PATCH 327/332] improve: update code style and comments --- framework/Source/iOS/PictureInput.swift | 23 -------- framework/Source/iOS/PictureProcessor.swift | 62 ++++++++++++++------- 2 files changed, 41 insertions(+), 44 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 5e9229fa..571cd70c 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -1,29 +1,6 @@ import OpenGLES import UIKit -/// Operation on input image, which will be translated into CIImage opereation -public enum PictureInputProcessStep { - public enum AnchorPoint { - // Default anchor point for CIImage - case originPoint - // CIImage.extent.center as anchor point - case extentCenter - // Custom anchor point - case custom(point: CGPoint) - } - /// Scale - case scale(x: CGFloat, y: CGFloat, anchorPoint: AnchorPoint) - /// Crop to rect. Rect values are from [0, 1] and its base is the lates extend rect of the image after previous steps. - /// **isViewCoordinate** is true indicates zero point is Left-Top corner, false indicates zero point is Left-Bottom corner. 
- case crop(rect: CGRect, isViewCoordinate: Bool) - /// Rotate image by angle (unit: radian) - case rotation(angle: CGFloat, anchorPoint: AnchorPoint) - /// Remember the original extent rect, rotate image by angle (unit: radian), scale by ratio, then crop to original extent rect - case rotateScaleAndKeepRect(angle: CGFloat, scale: CGFloat, anchorPoint: AnchorPoint) - /// Resize apsect ratio - case resizeAspectRatio(size: CGSize, isFill: Bool, allowUpScale: Bool) -} - public enum PictureInputError: Error, CustomStringConvertible { case zeroSizedImageError case dataProviderNilError diff --git a/framework/Source/iOS/PictureProcessor.swift b/framework/Source/iOS/PictureProcessor.swift index d0ae3e46..040ed9cf 100644 --- a/framework/Source/iOS/PictureProcessor.swift +++ b/framework/Source/iOS/PictureProcessor.swift @@ -7,6 +7,29 @@ import Foundation +/// Operation on input image, which will be translated into CIImage opereation +public enum PictureInputProcessStep { + public enum AnchorPoint { + // Default anchor point for CIImage + case originPoint + // CIImage.extent.center as anchor point + case extentCenter + // Custom anchor point + case custom(point: CGPoint) + } + /// Scale + case scale(x: CGFloat, y: CGFloat, anchorPoint: AnchorPoint) + /// Crop to rect. Rect values are from [0, 1] and base on the latest extend rect of the image after previous steps. + /// **isViewCoordinate** is true indicates zero point is Left-Top corner, false indicates zero point is Left-Bottom corner. 
+ case crop(rect: CGRect, isViewCoordinate: Bool) + /// Rotate image by angle (unit: radian) + case rotation(angle: CGFloat, anchorPoint: AnchorPoint) + /// Remember the original extent rect, rotate image by angle (unit: radian), scale by ratio, then crop to original extent rect + case rotateScaleAndKeepRect(angle: CGFloat, scale: CGFloat, anchorPoint: AnchorPoint) + /// Scale and crop to match target size ratio + case resizeAspectRatio(size: CGSize, isFill: Bool, allowUpScale: Bool) +} + extension CIImage { /// Shared CIContext to improve performance static var glBackedContext = CIContext(eaglContext: sharedImageProcessingContext.context) @@ -25,9 +48,9 @@ extension CIImage { case let .crop(rect, isViewCoordinate): guard rect.origin != .zero || rect.size != CGSize(width: 1.0, height: 1.0) else { continue } // rasterized: [0, 1] -> [0, width/height] - let adjustedY: CGFloat = isViewCoordinate ? (1.0 - rect.maxY) : rect.origin.y - let rasterizedRect = CGRect(x: rect.origin.x * newImage.accurateExtent.size.width + newImage.accurateExtent.origin.x, - y: adjustedY * newImage.accurateExtent.size.height + newImage.accurateExtent.origin.y, + let adjustedY: CGFloat = isViewCoordinate ? 
(1.0 - rect.maxY) : rect.minY + let rasterizedRect = CGRect(x: rect.minX * newImage.accurateExtent.size.width + newImage.accurateExtent.minX, + y: adjustedY * newImage.accurateExtent.size.height + newImage.accurateExtent.minY, width: rect.size.width * newImage.accurateExtent.size.width, height: rect.size.height * newImage.accurateExtent.size.height).rounded() newImage = newImage.accurateCropped(to: rasterizedRect) @@ -46,14 +69,18 @@ extension CIImage { } newImage = newImage.accurateCropped(to: originExtent) case let .resizeAspectRatio(size, isFill, allowUpScale): - guard size != .zero else { continue } - let roundedCroppedUnscaleFrame: CGRect + guard size != newImage.accurateExtent.size && size != .zero else { continue } + // Crop to target size ratio, always use center point as anchor point when cropping + let targetRect = CGRect(x: newImage.accurateExtent.midX - size.width / 2, y: newImage.accurateExtent.midY - size.height / 2, width: size.width, height: size.height) + var roundedCroppedUnscaleFrame: CGRect + // NOTE: this operation needs reverse thinking. Fill: target rect fits original rect. Fit: target rect fill original rect. 
if isFill { - roundedCroppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).fitRect(inside: newImage.accurateExtent).rounded() + roundedCroppedUnscaleFrame = targetRect.fitRect(inside: newImage.accurateExtent).rounded() } else { - roundedCroppedUnscaleFrame = CGRect(x: 0, y: 0, width: size.width, height: size.height).aspectToFill(insideRect: newImage.accurateExtent).rounded() + roundedCroppedUnscaleFrame = targetRect.aspectToFill(insideRect: newImage.accurateExtent).rounded() } newImage = newImage.accurateCropped(to: roundedCroppedUnscaleFrame) + // Scale to target size if needed let scaleRatio = size.width / roundedCroppedUnscaleFrame.width if scaleRatio < 1 || allowUpScale { newImage = newImage.accurateTransformed(by: .init(scaleX: scaleRatio, y: scaleRatio)) @@ -89,8 +116,8 @@ extension CIImage { let sizeRoundedTransform = transform.scaledBy(x: transformedRect.rounded().width / transformedRect.width, y: transformedRect.rounded().height / transformedRect.height) let sizeRoundedRect = accurateExtent.applying(sizeRoundedTransform) let positionRoundedRect = sizeRoundedRect.rounded(.towardZero) - let positionRoundedTransform = sizeRoundedTransform.translatedBy(x: positionRoundedRect.origin.x - sizeRoundedRect.origin.x, - y: positionRoundedRect.origin.y - sizeRoundedRect.origin.y) + let positionRoundedTransform = sizeRoundedTransform.translatedBy(x: positionRoundedRect.minX - sizeRoundedRect.minX, + y: positionRoundedRect.minY - sizeRoundedRect.minY) transformedImage = transformed(by: positionRoundedTransform) transformedImage.accurateExtent = accurateExtent.applying(positionRoundedTransform) } else { @@ -112,7 +139,7 @@ extension CIImage { private static var _accurateExtentKey = 0 - // NOTE: CIImage.extend will always return an integral rect, so if we want the accurate rect after transforming, we need to apply transform on the original rect + // NOTE: CIImage.extend will sometimes return an integral rect, so if we want the accurate rect 
after transforming, we need to apply transform on the original rect var accurateExtent: CGRect { get { (objc_getAssociatedObject(self, &Self._accurateExtentKey) as? NSValue)?.cgRectValue ?? extent } set { objc_setAssociatedObject(self, &Self._accurateExtentKey, NSValue(cgRect: newValue), .OBJC_ASSOCIATION_RETAIN_NONATOMIC) } @@ -122,7 +149,7 @@ extension CIImage { var trimmedExtent: CGRect { let accurateExtent = accurateExtent if accurateExtent.integral != accurateExtent { - return CGRect(x: accurateExtent.origin.x + 1, y: accurateExtent.origin.y + 1, width: accurateExtent.size.width - 2, height: accurateExtent.size.height - 2) + return accurateExtent.rounded(.up).insetBy(dx: 1, dy: 1) } else { return accurateExtent } @@ -133,8 +160,8 @@ extension CGRect { func fitRect(inside rect: CGRect) -> CGRect { let scale = min(rect.width / width, rect.height / height) let scaledSize = size.applying(CGAffineTransform(scaleX: scale, y: scale)) - let fitX = (rect.width - scaledSize.width) / 2 + rect.origin.x - let fitY = (rect.height - scaledSize.height) / 2 + rect.origin.y + let fitX = (rect.width - scaledSize.width) / 2 + rect.minX + let fitY = (rect.height - scaledSize.height) / 2 + rect.minY return CGRect(origin: CGPoint(x: fitX, y: fitY), size: scaledSize) } @@ -148,13 +175,6 @@ extension CGRect { } func rounded(_ rule: FloatingPointRoundingRule = .toNearestOrAwayFromZero) -> CGRect { - return CGRect(x: origin.x.rounded(rule), y: origin.y.rounded(rule), width: size.width.rounded(rule), height: size.height.rounded(rule)) - } -} - -fileprivate extension CGAffineTransform { - func roundedXYTransform(for rect: CGRect) -> CGAffineTransform { - let transformedRect = rect.applying(self) - return translatedBy(x: transformedRect.origin.x.rounded(.towardZero) - transformedRect.origin.x, y: transformedRect.origin.y.rounded(.towardZero) - transformedRect.origin.y) + return CGRect(x: minX.rounded(rule), y: minY.rounded(rule), width: size.width.rounded(rule), height: 
size.height.rounded(rule)) } } From a357e94e977ad8b3edda11f31aa4e19fb7b70e4e Mon Sep 17 00:00:00 2001 From: Pinlin Date: Wed, 12 May 2021 20:16:14 +0800 Subject: [PATCH 328/332] improve: separate CIContext for GPU and CPU for better performance --- framework/Source/iOS/FramebufferGenerator.swift | 2 +- framework/Source/iOS/PictureInput.swift | 6 +++--- framework/Source/iOS/PictureProcessor.swift | 7 ++++--- framework/Source/iOS/RenderView.swift | 1 + 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/framework/Source/iOS/FramebufferGenerator.swift b/framework/Source/iOS/FramebufferGenerator.swift index 79725eaf..03a630bb 100644 --- a/framework/Source/iOS/FramebufferGenerator.swift +++ b/framework/Source/iOS/FramebufferGenerator.swift @@ -217,7 +217,7 @@ private extension FramebufferGenerator { glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE)) // TODO: this API performance is slower than Crop filter, improve this later - CIImage.glBackedContext.draw(processedImage, in: CGRect(origin: .zero, size: processedImage.accurateExtent.rounded(.towardZero).size), from: processedImage.accurateExtent.rounded(.towardZero)) + CIImage.ciGPUContext.draw(processedImage, in: CGRect(origin: .zero, size: processedImage.accurateExtent.rounded(.towardZero).size), from: processedImage.accurateExtent.rounded(.towardZero)) // debugPrint("Reneder CIImage to OpenGL texture. 
time: \(CACurrentMediaTime() - startTime)") diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 571cd70c..9cb4f68a 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -192,7 +192,7 @@ public class PictureInput: ImageSource { let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) // crop image to target display frame newImage = newImage.accurateCropped(to: displayFrame) - guard let newCgImage = newImage.renderToCGImage() else { + guard let newCgImage = newImage.renderToCGImage(onGPU: false) else { throw PictureInputError.createImageError } cgImage = newCgImage @@ -238,7 +238,7 @@ public class PictureInput: ImageSource { ] ]) } - guard let newCgImage = ciImage?.processed(with: processSteps).renderToCGImage() else { + guard let newCgImage = ciImage?.processed(with: processSteps).renderToCGImage(onGPU: false) else { throw PictureInputError.createImageError } croppedCGImage = newCgImage @@ -248,7 +248,7 @@ public class PictureInput: ImageSource { let ciImage = CIImage(image: image, options: [.applyOrientationProperty: true, .properties: [ kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue ]]), - let rotatedImage = ciImage.renderToCGImage() { + let rotatedImage = ciImage.renderToCGImage(onGPU: false) { // Rotated correct orientation croppedCGImage = rotatedImage } else { diff --git a/framework/Source/iOS/PictureProcessor.swift b/framework/Source/iOS/PictureProcessor.swift index 040ed9cf..307a8fe2 100644 --- a/framework/Source/iOS/PictureProcessor.swift +++ b/framework/Source/iOS/PictureProcessor.swift @@ -32,7 +32,8 @@ public enum PictureInputProcessStep { extension CIImage { /// Shared CIContext to improve performance - static var glBackedContext = CIContext(eaglContext: sharedImageProcessingContext.context) + static var ciGPUContext = 
CIContext(eaglContext: sharedImageProcessingContext.context) + static var ciCPUContext = CIContext() func processed(with processSteps: [PictureInputProcessStep]?) -> CIImage { guard let processSteps = processSteps, !processSteps.isEmpty else { return self } @@ -133,8 +134,8 @@ extension CIImage { return croppedImage } - func renderToCGImage() -> CGImage? { - return Self.glBackedContext.createCGImage(self, from: accurateExtent.rounded(.towardZero)) + func renderToCGImage(onGPU: Bool) -> CGImage? { + return (onGPU ? Self.ciGPUContext : Self.ciCPUContext).createCGImage(self, from: accurateExtent.rounded(.towardZero)) } private static var _accurateExtentKey = 0 diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index fa3e2c20..85843f5c 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -88,6 +88,7 @@ public class RenderView: UIView, ImageConsumer { } deinit { + debugPrint("RenderView deinit") let strongDisplayFramebuffer = displayFramebuffer let strongDisplayRenderbuffer = displayRenderbuffer sharedImageProcessingContext.runOperationAsynchronously { From bdb68a0450f9e7bb86b1fdb257a2970035f954bc Mon Sep 17 00:00:00 2001 From: Pinlin Date: Thu, 13 May 2021 17:54:37 +0800 Subject: [PATCH 329/332] chore: update code style with lint --- framework/Source/BasicOperation.swift | 4 ++-- framework/Source/Mac/Camera.swift | 6 +++--- framework/Source/Mac/PictureInput.swift | 2 +- framework/Source/Mac/PictureOutput.swift | 2 +- framework/Source/Pipeline.swift | 2 +- framework/Source/ShaderProgram.swift | 2 +- framework/Source/TwoStageOperation.swift | 2 +- framework/Source/iOS/Camera.swift | 8 ++++---- framework/Source/iOS/PictureInput.swift | 2 +- framework/Source/iOS/PictureOutput.swift | 4 ++-- framework/Source/iOS/RenderView.swift | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 
44974d7b..79f372e8 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -16,7 +16,7 @@ open class BasicOperation: ImageProcessingOperation { public var overriddenOutputSize: Size? public var overriddenOutputRotation: Rotation? public var backgroundColor = Color.black - public var drawUnmodifiedImageOutsideOfMask: Bool = true + public var drawUnmodifiedImageOutsideOfMask = true public var mask: ImageSource? { didSet { if let mask = mask { @@ -33,7 +33,7 @@ open class BasicOperation: ImageProcessingOperation { } } } - public var activatePassthroughOnNextFrame: Bool = false + public var activatePassthroughOnNextFrame = false public var uniformSettings = ShaderUniformSettings() // MARK: - diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 347d34b3..65d59e0a 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -5,8 +5,8 @@ let initialBenchmarkFramesToIgnore = 5 public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { public var orientation: ImageOrientation - public var runBenchmark: Bool = false - public var logFPS: Bool = false + public var runBenchmark = false + public var logFPS = false public var audioEncodingTarget: AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { @@ -31,7 +31,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var audioInput: AVCaptureDeviceInput? var audioOutput: AVCaptureAudioDataOutput? - var supportsFullYUVRange: Bool = false + var supportsFullYUVRange = false let captureAsYUV: Bool let yuvConversionShader: ShaderProgram? 
let frameRenderingSemaphore = DispatchSemaphore(value: 1) diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index 91ce44c5..bd5d045c 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -4,7 +4,7 @@ import Cocoa public class PictureInput: ImageSource { public let targets = TargetContainer() var imageFramebuffer: Framebuffer! - var hasProcessedImage: Bool = false + var hasProcessedImage = false public init(image: CGImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation = .portrait) { // TODO: Dispatch this whole thing asynchronously to move image loading off main thread diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 343e91c9..138e6122 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -10,7 +10,7 @@ public class PictureOutput: ImageConsumer { public var encodedImageAvailableCallback: ((Data) -> Void)? public var encodedImageFormat: PictureFileFormat = .png public var imageAvailableCallback: ((NSImage) -> Void)? 
- public var onlyCaptureNextFrame: Bool = true + public var onlyCaptureNextFrame = true public let sources = SourceContainer() public let maximumInputs: UInt = 1 diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 217456d7..b422dec3 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -305,7 +305,7 @@ public class ImageRelay: ImageProcessingOperation { public let sources = SourceContainer() public let targets = TargetContainer() public let maximumInputs: UInt = 1 - public var preventRelay: Bool = false + public var preventRelay = false public init() { } diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 035f4a1d..c65cb75f 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -25,7 +25,7 @@ enum ShaderType { public class ShaderProgram { public var colorUniformsUseFourComponents = false - public static var disableAttributeCache: Bool = false + public static var disableAttributeCache = false let program: GLuint let initTime: CFTimeInterval? var vertexShader: GLuint! 
// At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets diff --git a/framework/Source/TwoStageOperation.swift b/framework/Source/TwoStageOperation.swift index a5b22735..0c220660 100644 --- a/framework/Source/TwoStageOperation.swift +++ b/framework/Source/TwoStageOperation.swift @@ -1,5 +1,5 @@ open class TwoStageOperation: BasicOperation { - public var overrideDownsamplingOptimization: Bool = false + public var overrideDownsamplingOptimization = false // override var outputFramebuffer:Framebuffer { get { return Framebuffer } } diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 25145ac2..7a784823 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -66,8 +66,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer configureDeviceInput(location: location, deviceType: deviceType) } } - public var runBenchmark: Bool = false - public var logFPS: Bool = false + public var runBenchmark = false + public var logFPS = false public var audioEncodingTarget: AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { @@ -94,7 +94,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var microphone: AVCaptureDevice? public var audioInput: AVCaptureDeviceInput? public var audioOutput: AVCaptureAudioDataOutput? - public var dontDropFrames: Bool = false + public var dontDropFrames = false public var deviceType: AVCaptureDevice.DeviceType { return inputCamera.deviceType } @@ -113,7 +113,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - var supportsFullYUVRange: Bool = false + var supportsFullYUVRange = false let captureAsYUV: Bool let yuvConversionShader: ShaderProgram? 
let frameRenderingSemaphore = DispatchSemaphore(value: 1) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 9cb4f68a..5559d9e3 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -30,7 +30,7 @@ public class PictureInput: ImageSource { public private(set) var imageFramebuffer: Framebuffer? public var framebufferUserInfo: [AnyHashable: Any]? public let imageName: String - var hasProcessedImage: Bool = false + var hasProcessedImage = false #if DEBUG public var printDebugRenderInfos = true public var debugRenderInfo: String = "" diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index a95a7b88..4974219f 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -12,8 +12,8 @@ public class PictureOutput: ImageConsumer { public var encodedJPEGImageCompressionQuality: CGFloat = 0.8 public var imageAvailableCallback: ((UIImage) -> Void)? public var cgImageAvailableCallback: ((CGImage) -> Void)? - public var onlyCaptureNextFrame: Bool = true - public var keepImageAroundForSynchronousCapture: Bool = false + public var onlyCaptureNextFrame = true + public var keepImageAroundForSynchronousCapture = false public var exportWithAlpha = false var storedFramebuffer: Framebuffer? diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 85843f5c..71743496 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -26,7 +26,7 @@ public class RenderView: UIView, ImageConsumer { var displayRenderbuffer: GLuint? 
var backingSize = GLSize(width: 0, height: 0) var renderSize = CGSize.zero - private var isAppForeground: Bool = true + private var isAppForeground = true private lazy var displayShader: ShaderProgram = { return sharedImageProcessingContext.passthroughShader From b8fb7ea267b151359ebfe6ac2f97ab65db7695db Mon Sep 17 00:00:00 2001 From: Pinlin Date: Sun, 16 May 2021 00:10:55 +0800 Subject: [PATCH 330/332] fix(framebuffer): use correct orientation for CIImage process --- framework/Source/iOS/MovieInput.swift | 2 +- framework/Source/iOS/MoviePlayer.swift | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index cef4984a..7fbf5f1c 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -467,7 +467,7 @@ public class MovieInput: ImageSource { let startTime = CACurrentMediaTime() var outputFramebuffer: Framebuffer? if let processSteps = processSteps, !processSteps.isEmpty { - outputFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(movieFrame, frameTime: withSampleTime, processSteps: processSteps, videoOrientation: videoOrientation) + outputFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(movieFrame, frameTime: withSampleTime, processSteps: processSteps, videoOrientation: asset.originalOrientation ?? .portrait) } else { outputFramebuffer = framebufferGenerator.generateFromYUVBuffer(movieFrame, frameTime: withSampleTime, videoOrientation: videoOrientation) } diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index d7b569bb..f724bfe3 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -625,7 +625,7 @@ private extension MoviePlayer { guard hasTarget else { return } let newFramebuffer: Framebuffer? 
if let processSteps = processSteps, !processSteps.isEmpty { - newFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(pixelBuffer, frameTime: timeForDisplay, processSteps: processSteps, videoOrientation: videoOrientation) + newFramebuffer = framebufferGenerator.processAndGenerateFromBuffer(pixelBuffer, frameTime: timeForDisplay, processSteps: processSteps, videoOrientation: asset?.originalOrientation ?? .portrait) } else { newFramebuffer = framebufferGenerator.generateFromYUVBuffer(pixelBuffer, frameTime: timeForDisplay, videoOrientation: videoOrientation) } From 7f82c51f214d12464cb2b1628e66189cb33027e7 Mon Sep 17 00:00:00 2001 From: Herry Zhong Date: Fri, 14 May 2021 17:15:09 +0800 Subject: [PATCH 331/332] delete unused PictureInput init because already has a generic PictureInput init --- framework/Source/iOS/PictureInput.swift | 45 --------------------- framework/Source/iOS/PictureProcessor.swift | 2 +- 2 files changed, 1 insertion(+), 46 deletions(-) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 5559d9e3..5636dc45 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -172,52 +172,7 @@ public class PictureInput: ImageSource { guard let image = UIImage(named: imageName) else { throw PictureInputError.noSuchImageError(imageName: imageName) } try self.init(image: image.cgImage!, imageName: imageName, smoothlyScaleOutput: smoothlyScaleOutput, orientation: orientation ?? image.imageOrientation.gpuOrientation) } - - public convenience init(image: UIImage, imageSize: CGSize, renderTargetSize: CGSize, renderTargetOffset: CGPoint, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil) throws { - #if DEBUG - let startTime = CACurrentMediaTime() - #endif - - var targetOrientation = orientation ?? image.imageOrientation.gpuOrientation - var cgImage: CGImage = image.cgImage! 
- try autoreleasepool { - let options: [CIImageOption: Any] = [.applyOrientationProperty: true, - .properties: [kCGImagePropertyOrientation: image.imageOrientation.cgImageOrientation.rawValue]] - var newImage = CIImage(cgImage: cgImage, options: options) - // scale to image size - let ratioW = imageSize.width / image.size.width - let ratioH = imageSize.height / image.size.height - let fillRatio = max(ratioW, ratioH) - newImage = newImage.accurateTransformed(by: .init(scaleX: fillRatio, y: fillRatio)) - let displayFrame = CGRect(origin: CGPoint(x: renderTargetOffset.x * imageSize.width, y: renderTargetOffset.y * imageSize.height), size: renderTargetSize) - // crop image to target display frame - newImage = newImage.accurateCropped(to: displayFrame) - guard let newCgImage = newImage.renderToCGImage(onGPU: false) else { - throw PictureInputError.createImageError - } - cgImage = newCgImage - targetOrientation = orientation ?? .portrait - } - - let preprocessRenderInfo: String - #if DEBUG - preprocessRenderInfo = """ -{ - PictureInput_pre_process : { - input: { - size: \(image.size.debugRenderInfo), type: UIImage, imageSize:\(imageSize.debugRenderInfo), renderTargetSize: \(renderTargetSize.debugRenderInfo), renderTargetOffset: \(renderTargetOffset.debugDescription) - }, - output: { size: \(cgImage.width)x\(cgImage.height), type: CGImage }, - time: \((CACurrentMediaTime() - startTime) * 1000.0)ms -}, -""" - #else - preprocessRenderInfo = "" - #endif - try self.init(image: cgImage, imageName: "UIImage", smoothlyScaleOutput: smoothlyScaleOutput, orientation: targetOrientation, preprocessRenderInfo: preprocessRenderInfo) - } - public convenience init(image: UIImage, smoothlyScaleOutput: Bool = false, orientation: ImageOrientation? = nil, processSteps: [PictureInputProcessStep]? 
= nil) throws { #if DEBUG let startTime = CACurrentMediaTime() diff --git a/framework/Source/iOS/PictureProcessor.swift b/framework/Source/iOS/PictureProcessor.swift index 307a8fe2..fe7c2b39 100644 --- a/framework/Source/iOS/PictureProcessor.swift +++ b/framework/Source/iOS/PictureProcessor.swift @@ -35,7 +35,7 @@ extension CIImage { static var ciGPUContext = CIContext(eaglContext: sharedImageProcessingContext.context) static var ciCPUContext = CIContext() - func processed(with processSteps: [PictureInputProcessStep]?) -> CIImage { + public func processed(with processSteps: [PictureInputProcessStep]?) -> CIImage { guard let processSteps = processSteps, !processSteps.isEmpty else { return self } var newImage = self for step in processSteps { From c4130f5224ff3b1f53f6a11cbe5f877776fe8a24 Mon Sep 17 00:00:00 2001 From: Kubrick G <513776985@qq.com> Date: Tue, 1 Jun 2021 14:01:02 +0800 Subject: [PATCH 332/332] improve(player): cache asset duration. --- framework/Source/iOS/MoviePlayer.swift | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/framework/Source/iOS/MoviePlayer.swift b/framework/Source/iOS/MoviePlayer.swift index f724bfe3..8118a756 100644 --- a/framework/Source/iOS/MoviePlayer.swift +++ b/framework/Source/iOS/MoviePlayer.swift @@ -56,9 +56,18 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { var totalFrameTime: Double = 0.0 public var dropFrameBeforeTime: CMTime? public var playrate: Float = 1.0 + private lazy var assetDurationMap = [AVAsset: CMTime]() public var assetDuration: CMTime { - if asset?.statusOfValue(forKey: "duration", error: nil) == .loaded { - return asset?.duration ?? 
.zero + if let currentAsset = asset { + if let cachedDuration = assetDurationMap[currentAsset] { + return cachedDuration + } else if currentAsset.statusOfValue(forKey: "duration", error: nil) == .loaded { + let duration = currentAsset.duration + assetDurationMap[currentAsset] = duration + return duration + } else { + return .zero + } } else { return .zero } @@ -346,6 +355,7 @@ public class MoviePlayer: AVQueuePlayer, ImageSource { isSeeking = false nextSeeking = nil dropFrameBeforeTime = nil + assetDurationMap.removeAll() MoviePlayer.looperDict[self]?.disableLooping() MoviePlayer.looperDict[self] = nil }