While reading a video with the GPUImage3 example (examples -> iOS -> SimpleMovieFilter), I noticed memory usage ballooning. Tracking it down, the culprit turned out to be MovieInput.
The fixed code:
func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
    autoreleasepool {
        ........
    }
}
Wrapping the body in autoreleasepool lets the objects created inside it be released at the end of each frame, instead of accumulating for as long as the read loop keeps running.
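For context, here is a minimal sketch of why the pool has to live inside the per-frame path. This is illustrative only, not MovieInput's actual source; readAllFrames, output, and reader are names I made up:

import AVFoundation

// Illustrative only: each CMSampleBuffer copied here is autoreleased.
// Without a per-iteration pool, those buffers are only freed when the
// thread's outer pool drains, potentially after the whole movie is read.
func readAllFrames(from output: AVAssetReaderTrackOutput, reader: AVAssetReader) {
    while reader.status == .reading {
        autoreleasepool {
            guard let sampleBuffer = output.copyNextSampleBuffer(),
                  let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            // Hand the frame to the patched processor:
            // process(movieFrame: pixelBuffer, withSampleTime: sampleTime)
            _ = (pixelBuffer, sampleTime)
        }
    }
}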
Sample code: in my case there is no need for YUV-to-RGB conversion, so I fetch the texture directly and commented out the redundant code:
func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
    autoreleasepool {
        let bufferHeight = CVPixelBufferGetHeight(movieFrame)
        let bufferWidth = CVPixelBufferGetWidth(movieFrame)
        CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))

        // Unused here now that the YUV conversion path is commented out.
        let conversionMatrix = colorConversionMatrix601FullRangeDefault
        // TODO: Get this color query working
        // if let colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) {
        //     if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) {
        //         _preferredConversion = kColorConversion601FullRange
        //     } else {
        //         _preferredConversion = kColorConversion709
        //     }
        // } else {
        //     _preferredConversion = kColorConversion601FullRange
        // }

        let startTime = CFAbsoluteTimeGetCurrent()

        let texture: Texture?
        var luminanceTextureRef: CVMetalTexture? = nil
        // var chrominanceTextureRef: CVMetalTexture? = nil

        // The frames are already BGRA, so read the whole buffer as a single plane.
        let _ = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.videoTextureCache!, movieFrame, nil, .bgra8Unorm, bufferWidth, bufferHeight, 0, &luminanceTextureRef)
        // Chrominance plane (not needed for BGRA input)
        // let _ = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.videoTextureCache!, movieFrame, nil, .rg8Unorm, bufferWidth / 2, bufferHeight / 2, 1, &chrominanceTextureRef)

        if let concreteLuminanceTextureRef = luminanceTextureRef,
           let luminanceTexture = CVMetalTextureGetTexture(concreteLuminanceTextureRef) {
            texture = Texture(orientation: .portrait, texture: luminanceTexture)
        } else {
            texture = nil
        }

        CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))

        if let texture = texture {
            self.updateTargetsWithTexture(texture)
        }

        if self.runBenchmark {
            let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime)
            self.numberOfFramesCaptured += 1
            self.totalFrameTimeDuringCapture += currentFrameTime
            print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured)) ms")
            print("Current frame time : \(1000.0 * currentFrameTime) ms")
        }
    }
}
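One caveat if you skip the YUV conversion like this: reading the whole buffer as a single .bgra8Unorm texture only works when the asset reader actually delivers BGRA frames. As far as I can tell, MovieInput requests a biplanar YUV pixel format by default, so the track output has to be reconfigured. A sketch of the assumed setup (makeVideoOutput is a hypothetical helper):

import AVFoundation

// Ask the reader for 32-bit BGRA frames so no YUV-to-RGB conversion
// is needed downstream (hypothetical helper, shown for illustration).
func makeVideoOutput(for videoTrack: AVAssetTrack) -> AVAssetReaderTrackOutput {
    let outputSettings: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]
    return AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
}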