I'm trying to develop a video camera app with "live" effects. The pipeline is based on AVCaptureVideoDataOutputSampleBufferDelegate, an MTKView, and a Metal compute pipeline. While the app is running and recording, everything looks fine. The problems start when I play back the finished video: unpleasant artifacts appear in it, and I can't figure out how to remove them.
This is what the captureOutput function looks like:
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard CMSampleBufferDataIsReady(sampleBuffer) else { return }
    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    if connection == hardware.videoOut.connection(with: .video) {
        // Store the latest video sample buffer and let the MTKView render it.
        self.sampleBuffer = sampleBuffer
        self.hardware.preview.draw()
        if self.focusImageVisibleSeconds > 0 {
            if let buffer = sampleBuffer.imageBuffer {
                // Crop a 200x200 patch around the center for the focus aid.
                var ciImage = CIImage(cvImageBuffer: buffer)
                let center = CGPoint(x: ciImage.extent.size.width/2, y: ciImage.extent.size.height/2)
                let point = CGPoint(x: center.x - 100, y: center.y - 100)
                let size = CGSize(width: 200, height: 200)
                ciImage = ciImage.cropped(to: CGRect(origin: point, size: size))
                let image = self.hardware.context.createCGImage(ciImage, from: ciImage.extent)
                DispatchQueue.main.async { self.focusImage = image }
            }
            self.focusImageVisibleSeconds -= self.activeDevice?.activeVideoMinFrameDuration.seconds ?? 0
        } else if self.focusImage != nil {
            DispatchQueue.main.async { self.focusImage = nil }
        }
    }
    guard canWrite() else { return }
    if let sessionAtSourceTime = sessionAtSourceTime {
        DispatchQueue.main.async { [weak self] in
            guard let self else { return }
            let duration = Int(timestamp.seconds - sessionAtSourceTime.seconds)
            if self.duration < duration { self.duration = duration }
        }
    } else {
        // First frame after recording started: open the writer session.
        assetWriter?.startSession(atSourceTime: timestamp)
        sessionAtSourceTime = timestamp
    }
    if connection == hardware.audioOut.connection(with: .audio) && self.aInput?.isReadyForMoreMediaData == true {
        self.hardware.writeQueue.async {
            self.aInput?.append(sampleBuffer)
        }
    }
}
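For reference, the canWrite() helper is not shown here; a minimal sketch of what such a guard typically checks, assuming the same isWriting and assetWriter properties used in the writing code below:

func canWrite() -> Bool {
    // Assumed implementation: only append media while a recording is
    // active and the writer is still accepting data. Note it must not
    // require sessionAtSourceTime, which is set after the first frame.
    return isWriting && assetWriter != nil && assetWriter?.status == .writing
}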
This is what the MTKView draw(in:) function looks like:
func draw(in view: MTKView) {
    guard let sampleBuffer = sampleBuffer, let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    let stamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    let width = CVPixelBufferGetWidth(imageBuffer)
    let height = CVPixelBufferGetHeight(imageBuffer)
    // Wrap the two planes of the biplanar pixel buffer as Metal textures.
    var luminanceCVTexture: CVMetalTexture?
    CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, hardware.textureCache!, imageBuffer, nil, .r8Unorm, width, height, 0, &luminanceCVTexture)
    var chrominanceCVTexture: CVMetalTexture?
    CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, hardware.textureCache!, imageBuffer, nil, .rg8Unorm, width/2, height/2, 1, &chrominanceCVTexture)
    guard let luminanceCVTexture = luminanceCVTexture,
          let inputLuminance = CVMetalTextureGetTexture(luminanceCVTexture),
          let chrominanceCVTexture = chrominanceCVTexture,
          let inputChrominance = CVMetalTextureGetTexture(chrominanceCVTexture)
    else { return }
    DispatchQueue.main.async {
        self.hardware.preview.drawableSize = CGSize(width: width, height: height)
    }
    guard let drawable: CAMetalDrawable = self.hardware.preview.currentDrawable else { return }
    guard let commandBuffer = hardware.commandQueue.makeCommandBuffer(), let computeCommandEncoder = commandBuffer.makeComputeCommandEncoder() else { return }
    computeCommandEncoder.setComputePipelineState(hardware.computePipelineState)
    computeCommandEncoder.setTexture(inputLuminance, index: 0)
    computeCommandEncoder.setTexture(inputChrominance, index: 1)
    computeCommandEncoder.setTexture(drawable.texture, index: 2)
    if let cubeBuffer = self.cubeBuffer {
        computeCommandEncoder.setBuffer(cubeBuffer.0, offset: 0, index: 0)
        computeCommandEncoder.setBuffer(cubeBuffer.1, offset: 0, index: 1)
    } else {
        // No LUT selected: fall back to a neutral 2x2x2 identity LUT.
        let lutSizeBuffer = self.hardware.device.makeBuffer(bytes: [2], length: MemoryLayout<Int>.size)
        computeCommandEncoder.setBuffer(lutSizeBuffer, offset: 0, index: 0)
        let lutBuffer = self.hardware.device.makeBuffer(bytes: neutralLutArray, length: neutralLutArray.count * MemoryLayout<SIMD4<Float>>.stride, options: [])
        computeCommandEncoder.setBuffer(lutBuffer, offset: 0, index: 1)
    }
    computeCommandEncoder.setBytes([Float(self.showNoise ? 1.0 : 0.0)], length: MemoryLayout<Float>.size, index: 2)
    computeCommandEncoder.setBytes([Float(stamp.seconds)], length: MemoryLayout<Float>.size, index: 3)
    computeCommandEncoder.dispatchThreadgroups(inputLuminance.threadGroups(), threadsPerThreadgroup: inputLuminance.threadGroupCount())
    computeCommandEncoder.endEncoding()
    commandBuffer.present(drawable)
    commandBuffer.addCompletedHandler { buffer in
        self.hardware.writeQueue.async {
            guard let adaptor = self.adaptor else { return }
            guard self.isWriting && self.assetWriter?.status == .writing && self.sessionAtSourceTime != nil && self.vInput?.isReadyForMoreMediaData == true
            else { return }
            // Copy the camera planes into a pool buffer and append it to the writer.
            var pixelBuffer: CVPixelBuffer?
            let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, adaptor.pixelBufferPool!, &pixelBuffer)
            guard let pixelBuffer = pixelBuffer, pixelBufferStatus == kCVReturnSuccess else { return }
            CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
            let lumaBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0)
            let chromaBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1)
            guard let luminanceBytes = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0),
                  let chrominanceBytes = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1)
            else { return }
            inputLuminance.getBytes(luminanceBytes, bytesPerRow: lumaBytesPerRow, from: MTLRegionMake2D(0, 0, inputLuminance.width, inputLuminance.height), mipmapLevel: 0)
            inputChrominance.getBytes(chrominanceBytes, bytesPerRow: chromaBytesPerRow, from: MTLRegionMake2D(0, 0, inputChrominance.width, inputChrominance.height), mipmapLevel: 0)
            if !adaptor.append(pixelBuffer, withPresentationTime: stamp) { print("Problem appending pixel buffer at time: \(stamp)") }
            CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
        }
    }
    commandBuffer.commit()
}
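threadGroups() and threadGroupCount() are custom MTLTexture helpers that are not shown; a minimal sketch of what they might look like, assuming a 16x16 threadgroup and a grid rounded up to cover the whole texture:

import Metal

extension MTLTexture {
    // Threads per threadgroup (assumed 16x16; the real helper may differ).
    func threadGroupCount() -> MTLSize {
        MTLSize(width: 16, height: 16, depth: 1)
    }
    // Number of threadgroups, rounded up so the grid covers every pixel.
    func threadGroups() -> MTLSize {
        let count = threadGroupCount()
        return MTLSize(width: (width + count.width - 1) / count.width,
                       height: (height + count.height - 1) / count.height,
                       depth: 1)
    }
}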
Here is how the AVAssetWriter is set up:
func setupWriter(for range: VideoDynamicRange, with format: VideoFormat) {
    guard let activeDevice = self.activeDevice else { return }
    var format = format
    guard let width = hardware.videoOut.videoSettings[kCVPixelBufferWidthKey as String] as? Int else { return }
    guard let height = hardware.videoOut.videoSettings[kCVPixelBufferHeightKey as String] as? Int else { return }
    let frameRate = Int(1/activeDevice.activeVideoMinFrameDuration.seconds)
    // Force HEVC for high-resolution, high-frame-rate recordings.
    if height > 2100 && frameRate > 30 { format = .hevc }
    let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("\(UUID().uuidString).\(format == .mp4 ? "MP4" : "MOV")")
    guard var vSettings = self.hardware.videoOut.recommendedVideoSettingsForAssetWriter(writingTo: format == .mp4 ? .mp4 : .mov) else { return }
    var compressionSettings = vSettings[AVVideoCompressionPropertiesKey] as! [String: Any]
    compressionSettings[AVVideoExpectedSourceFrameRateKey] = frameRate
    compressionSettings[AVVideoAverageBitRateKey] = Int(0.125/activeDevice.activeVideoMinFrameDuration.seconds)*width*height
    switch range {
    case .sdr:
        compressionSettings[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main_AutoLevel
        vSettings[AVVideoColorPropertiesKey] = [AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
                                                AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
                                                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2]
    case .hdr10:
        compressionSettings[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main10_AutoLevel
        vSettings[AVVideoColorPropertiesKey] = [AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_2020,
                                                AVVideoTransferFunctionKey: AVVideoTransferFunction_SMPTE_ST_2084_PQ,
                                                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_2020]
    case .dolbyVision:
        compressionSettings[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main10_AutoLevel
        vSettings[AVVideoColorPropertiesKey] = [AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_2020,
                                                AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_2100_HLG,
                                                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_2020]
    }
    vSettings[AVVideoCompressionPropertiesKey] = compressionSettings
    do {
        self.assetWriter = try AVAssetWriter(url: url, fileType: format == .mp4 ? AVFileType.mp4 : AVFileType.mov)
        self.assetWriter?.metadata = self.makeAVMetaData(with: location)
        print(self.assetWriter as Any)
        // Add video input
        self.vInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: vSettings)
        self.vInput?.expectsMediaDataInRealTime = true
        self.vInput?.mediaTimeScale = CMTimeScale(600)
        switch self.orientation {
        case .portraitUpsideDown: vInput?.transform = CGAffineTransform(rotationAngle: .pi)
        case .landscapeRight: vInput?.transform = CGAffineTransform(rotationAngle: .pi*3/2)
        case .landscapeLeft: vInput?.transform = CGAffineTransform(rotationAngle: .pi/2)
        default: vInput?.transform = CGAffineTransform(rotationAngle: 0)
        }
        guard vInput != nil else { return }
        let sourcePixelBufferAttributes: [String: AnyObject] = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)),
            kCVPixelBufferWidthKey as String: NSNumber(value: width),
            kCVPixelBufferHeightKey as String: NSNumber(value: height)
        ]
        adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: vInput!, sourcePixelBufferAttributes: sourcePixelBufferAttributes)
        if self.assetWriter?.canAdd(vInput!) == true { assetWriter?.add(vInput!) }
        // Add audio input
        if let aSettings = self.hardware.audioOut.recommendedAudioSettingsForAssetWriter(writingTo: format == .mp4 ? .mp4 : .mov) {
            self.aInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: aSettings)
            self.aInput?.expectsMediaDataInRealTime = true
            guard aInput != nil else { return }
            if self.assetWriter?.canAdd(aInput!) == true { assetWriter?.add(aInput!) }
        }
        self.assetWriter?.startWriting()
    } catch {
        debugPrint(error.localizedDescription)
    }
}
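The stop path is not shown; for completeness, a minimal sketch of how such a recording is typically finalized (the name stopWriting(completion:) is hypothetical, the properties match those used above):

func stopWriting(completion: @escaping (URL?) -> Void) {
    // Stop accepting new frames, close both inputs, and flush the file.
    isWriting = false
    vInput?.markAsFinished()
    aInput?.markAsFinished()
    sessionAtSourceTime = nil
    assetWriter?.finishWriting { [weak self] in
        // Hand back the file URL once the writer has flushed everything.
        completion(self?.assetWriter?.outputURL)
    }
}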
Link to the GitHub repository:
I will be very grateful for any help.
PS: I use biplanar textures and Metal to save device resources. When I used BGRA CMSampleBuffers or Core Image, the device heated up quickly.
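For context, the biplanar path assumes the capture output is configured to vend 4:2:0 YCbCr buffers, roughly like this (videoOut is the AVCaptureVideoDataOutput from the code above):

hardware.videoOut.videoSettings = [
    // Two-plane (luma + chroma) full-range format matching the
    // .r8Unorm / .rg8Unorm textures created in draw(in:).
    kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
]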