diff --git a/Sources/GPUImage/MetalRendering.swift b/Sources/GPUImage/MetalRendering.swift index 401eebf9..58849287 100644 --- a/Sources/GPUImage/MetalRendering.swift +++ b/Sources/GPUImage/MetalRendering.swift @@ -25,7 +25,13 @@ extension MTLCommandBuffer { renderEncoder.endEncoding() } - + func copyTexture(from: Texture, fromSize: Size, to: Texture) { + guard let blitEncoder = self.makeBlitCommandEncoder() else { + fatalError("Could not create blit command encoder") + } + blitEncoder.copy(from: from.texture, sourceSlice: 0, sourceLevel: 0, sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0), sourceSize: MTLSize(width: Int(round(fromSize.width)), height: Int(round(fromSize.height)), depth: 1), to: to.texture, destinationSlice: 0, destinationLevel: 0, destinationOrigin: .init(x: 0, y: 0, z: 0)) + blitEncoder.endEncoding() + } func renderQuad( pipelineState: MTLRenderPipelineState, uniformSettings: ShaderUniformSettings? = nil, inputTextures: [UInt: Texture], useNormalizedTextureCoordinates: Bool = true, diff --git a/Sources/GPUImage/MovieOutput.swift b/Sources/GPUImage/MovieOutput.swift index b31b1832..a206b3f3 100644 --- a/Sources/GPUImage/MovieOutput.swift +++ b/Sources/GPUImage/MovieOutput.swift @@ -184,7 +184,14 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { commandBuffer?.commit() commandBuffer?.waitUntilCompleted() } else { - outputTexture = texture + let commandBuffer = sharedMetalRenderingDevice.commandQueue.makeCommandBuffer() + outputTexture = Texture( + device: sharedMetalRenderingDevice.device, orientation: .portrait, + width: Int(round(self.size.width)), height: Int(round(self.size.height)), + timingStyle: texture.timingStyle) + commandBuffer?.copyTexture(from: texture, fromSize: self.size, to: outputTexture) + commandBuffer?.commit() + commandBuffer?.waitUntilCompleted() } let region = MTLRegionMake2D( diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift 
b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 17ea0aca..cc6fc814 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -13,7 +13,7 @@ class ViewController: UIViewController { super.viewDidLoad() do { - camera = try Camera(sessionPreset: .vga640x480) + camera = try Camera(sessionPreset: .hd4K3840x2160) camera.runBenchmark = true filter = SaturationAdjustment() camera --> filter --> renderView @@ -40,7 +40,7 @@ class ViewController: UIViewController { } movieOutput = try MovieOutput( - URL: fileURL, size: Size(width: 480, height: 640), liveVideo: true) + URL: fileURL, size: Size(width: 2160, height: 3840), liveVideo: true) // camera.audioEncodingTarget = movieOutput filter --> movieOutput! movieOutput!.startRecording()