I use RPScreenRecorder.shared().startCapture for screen recording and encode the result into an H.264 video file using AVAssetWriterInput, but that gives me a finished .mp4 directly. What I want is the H.264 video frame by frame while the screen is being recorded, so I can stream it. Is there any way to access the sample buffer data that comes from RPScreenRecorder.shared().startCapture? Here is the code. With it I get the whole mp4 file, but I only want the individual video frames.
import Foundation
import ReplayKit
import AVKit

class ScreenRecorder {

    var assetWriter: AVAssetWriter!
    var videoInput: AVAssetWriterInput!

    let viewOverlay = WindowUtil()
    let fileNameTxt = "Test"
    let dir = try? FileManager.default.url(for: .documentDirectory,
                                            in: .userDomainMask, appropriateFor: nil, create: true)
    var sampleFileBuffer: String = ""

    //MARK: Screen Recording
    func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void) {
        if #available(iOS 11.0, *) {
            let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
            assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType: AVFileType.mp4)

            let videoOutputSettings: [String: Any] = [
                AVVideoCodecKey: AVVideoCodecType.h264,
                AVVideoWidthKey: UIScreen.main.bounds.size.width,
                AVVideoHeightKey: UIScreen.main.bounds.size.height
            ]
            videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
            videoInput.expectsMediaDataInRealTime = true
            assetWriter.add(videoInput)

            // If the directory was found, we write a file to it and read it back
            let fileURLTxt = dir?.appendingPathComponent(fileNameTxt).appendingPathExtension("txt")
            RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
                //print(sample, bufferType, error)
                recordingHandler(error)

                if CMSampleBufferDataIsReady(sample) {
                    if self.assetWriter.status == AVAssetWriterStatus.unknown {
                        self.assetWriter.startWriting()
                        self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
                    }
                    if self.assetWriter.status == AVAssetWriterStatus.failed {
                        print("Error occurred, status = \(self.assetWriter.status.rawValue), \(self.assetWriter.error!.localizedDescription) \(String(describing: self.assetWriter.error))")
                        return
                    }
                    if bufferType == .video {
                        if self.videoInput.isReadyForMoreMediaData {
                            self.videoInput.append(sample)
                            // My failed attempt at pulling the raw frame data out as a String;
                            // a CMSampleBuffer cannot be cast to String, so this does not work:
                            // self.sampleFileBuffer = self.videoInput as! String
                            // self.sampleFileBuffer = String(sample as! String)
                            do {
                                try self.sampleFileBuffer.write(to: fileURLTxt!, atomically: true, encoding: .utf8)
                            } catch {
                                print("Failed writing to URL: \(String(describing: fileURLTxt)), Error: " + error.localizedDescription)
                            }
                        }
                    }
                    self.sampleFileBuffer = ""
                }
            }) { (error) in
                recordingHandler(error)
            }
        } else {
            // Fallback on earlier versions
        }
    }

    func stopRecording(handler: @escaping (Error?) -> Void) {
        if #available(iOS 11.0, *) {
            RPScreenRecorder.shared().stopCapture { (error) in
                handler(error)
                self.assetWriter.finishWriting {
                    print(ReplayFileUtil.fetchAllReplays())
                }
            }
        }
    }
}
In your code, sample is a CMSampleBuffer. Call CMSampleBufferGetImageBuffer() on it to get the CVImageBuffer, then lock the frame buffer with CVPixelBufferLockBaseAddress(imageBuffer). In my case the imageBuffer has two planes, Y and UV. Call CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0) to get the Y plane address, and call the same API with planeIndex = 1 to get the UV plane address.
Once you have a plane's base address, you can read it as uint8*. Use the CVPixelBufferGetXXX APIs to get the width, height, and bytes-per-row. Don't forget to call CVPixelBufferUnlockBaseAddress when you are done.
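Putting that together, here is a minimal sketch of reading both planes from the sample buffer you get in the capture handler. It assumes the pixel buffer ReplayKit delivers is biplanar YCbCr (plane 0 = Y, plane 1 = interleaved CbCr); the function name readPlanes is just for illustration.

import CoreMedia
import CoreVideo

func readPlanes(from sample: CMSampleBuffer) {
    // Get the pixel data backing the sample buffer.
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sample) else { return }

    // Lock the buffer before touching the base addresses; unlock when done.
    CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }

    // Plane 0: luma (Y).
    if let yBase = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0) {
        let yWidth       = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let yHeight      = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let yPlane = yBase.assumingMemoryBound(to: UInt8.self)
        // yPlane points at yHeight rows of yBytesPerRow bytes each (yWidth valid pixels per row).
        _ = (yPlane, yWidth, yHeight, yBytesPerRow)
    }

    // Plane 1: interleaved chroma (CbCr); half the Y height for a 4:2:0 buffer.
    if let uvBase = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1) {
        let uvHeight      = CVPixelBufferGetHeightOfPlane(imageBuffer, 1)
        let uvBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1)
        let uvPlane = uvBase.assumingMemoryBound(to: UInt8.self)
        _ = (uvPlane, uvHeight, uvBytesPerRow)
    }
}

You could call readPlanes(from: sample) inside the startCapture handler for buffers where bufferType == .video, and copy the plane bytes into whatever packet format your streaming pipeline expects.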