I am trying to extract a CMSampleBuffer
from an MP3 file to use it in my video recording. Here is the code:
First, get a PCM buffer from the MP3 file:
// Allocate a PCM buffer in the file's processing format.
// NOTE(review): frameCapacity is 1, so only a single audio frame is read per
// call — presumably this snippet runs inside a loop; confirm against the caller.
guard let audioPCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
frameCapacity: 1) else {
cancel()
return
}
// Read one frame from the file into the PCM buffer (can throw).
try audioFile.read(into: audioPCMBuffer, frameCount: 1)
// Convert the PCM data into a CMSampleBuffer; the data is treated as mono
// when the buffer list contains exactly one AudioBuffer.
if let audioSampleBuffer = this.processAudioData(audioData: audioPCMBuffer.audioBufferList, audioFormat: audioDataFormat.formatDescription, timingInfo: timingInfo, framesNumber: audioPCMBuffer.frameLength, mono: audioPCMBuffer.audioBufferList.pointee.mNumberBuffers == 1) {
// NOTE(review): `this` is presumably a weak-self capture bound outside this
// snippet, and `buffer` is not defined anywhere visible here — verify that
// `buffer` refers to a valid video sample buffer; as written this line
// cannot compile in isolation.
this.videoWriterInput?.append(buffer)
this.audioWriterInput?.append(audioSampleBuffer)
} else {
// show error?
}
Here is how I convert the PCM buffer to a CMSampleBuffer:
// Converts an AudioBufferList of PCM data into a CMSampleBuffer:
// 1) build a CMAudioFormatDescription, 2) create an empty sample buffer,
// 3) attach the audio data to it. Returns nil on any CoreMedia error.
var sbuf : CMSampleBuffer?
var status : OSStatus?
var format: CMFormatDescription?
// Pointer to the ASBD inside `inputFormat`; may be nil, force-unwrapped below.
// NOTE(review): if `inputFormat` is not an audio format description this
// crashes — consider guarding instead of using `audioFormat!`.
let audioFormat = CMAudioFormatDescriptionGetStreamBasicDescription(inputFormat)
// Local mutable copy so its address can be passed to CMSampleBufferCreate.
var timing = timingInfo
// Zeroed channel layout tagged mono or stereo from the caller's flag.
var acl = AudioChannelLayout();
bzero(&acl, MemoryLayout<AudioChannelLayout>.size);
acl.mChannelLayoutTag = mono ? kAudioChannelLayoutTag_Mono : kAudioChannelLayoutTag_Stereo;
status = CMAudioFormatDescriptionCreate(allocator: kCFAllocatorDefault, asbd: audioFormat!, layoutSize: MemoryLayout<AudioChannelLayout>.size, layout: &acl, magicCookieSize: 0, magicCookie: nil, extensions: nil, formatDescriptionOut: &format)
if status != noErr {
print("Error CMAudioFormatDescriptionCreater :\(String(describing: status?.description))")
return nil
}
// Create the sample buffer with NO data yet (dataBuffer: nil, dataReady: false);
// the PCM bytes are attached afterwards via
// CMSampleBufferSetDataBufferFromAudioBufferList.
// NOTE(review): the logged timing info has DTS = INVALID and
// duration = INVALID — confirm whether CoreMedia requires a valid duration
// here for LPCM; an invalid timing entry is a plausible cause of the
// "required parameter missing" failure below. TODO verify against the
// CMSampleBufferSetDataBufferFromAudioBufferList documentation.
status = CMSampleBufferCreate(allocator: kCFAllocatorDefault, dataBuffer: nil, dataReady: false, makeDataReadyCallback: nil, refcon: nil, formatDescription: format, sampleCount: CMItemCount(framesNumber), sampleTimingEntryCount: 1, sampleTimingArray: &timing, sampleSizeEntryCount: 0, sampleSizeArray: nil, sampleBufferOut: &sbuf)
if status != noErr {
print("Error CMSampleBufferCreate :\(String(describing: status?.description))")
return nil
}
guard let buf = sbuf else { return nil}
// Attach the caller-provided AudioBufferList to the sample buffer.
// NOTE(review): per the CoreMedia headers, -12731 is
// kCMSampleBufferError_RequiredParameterMissing while -12771 is
// kCMSimpleQueueError_RequiredParameterMissing — double-check which
// constant the returned code actually maps to.
status = CMSampleBufferSetDataBufferFromAudioBufferList(buf, blockBufferAllocator: kCFAllocatorDefault, blockBufferMemoryAllocator: kCFAllocatorDefault, flags: 0, bufferList: audioData)
// Debug-only snapshot of the buffer list (inspected in the lldb log below);
// otherwise unused.
let audiobufferListRaw: AudioBufferList = audioData.pointee
if status != noErr {
print("Error cCMSampleBufferSetDataBufferFromAudioBufferList :\(String(describing: status?.description))")
return nil
}
return buf
But I always get the error code -12771 (which I looked up as kCMSimpleQueueError_RequiredParameterMissing).
Here is the log of my audio format and the sample buffer:
(lldb) po self.audioFile!.processingFormat
<AVAudioFormat 0x1c4286090: 1 ch, 44100 Hz, Float32>
(lldb) po audiobufferListRaw
▿ AudioBufferList
- mNumberBuffers : 1
▿ mBuffers : AudioBuffer
- mNumberChannels : 1
- mDataByteSize : 4
▿ mData : Optional<UnsafeMutableRawPointer>
▿ some : 0x00000001c402c5e0
- pointerValue : 7583483360
(lldb) po buf
CMSampleBuffer 0x141d1b6a0 retainCount: 9 allocator: 0x1b5f76240
invalid = NO
dataReady = NO
makeDataReadyCallback = 0x0
makeDataReadyRefcon = 0x0
formatDescription = <CMAudioFormatDescription 0x1c4114e20 [0x1b5f76240]> {
mediaType:'soun'
mediaSubType:'lpcm'
mediaSpecific: {
ASBD: {
mSampleRate: 44100.000000
mFormatID: 'lpcm'
mFormatFlags: 0x29
mBytesPerPacket: 4
mFramesPerPacket: 1
mBytesPerFrame: 4
mChannelsPerFrame: 1
mBitsPerChannel: 32 }
cookie: {(null)}
ACL: {Mono}
FormatList Array: {(null)}
}
extensions: {(null)}
}
sbufToTrackReadiness = 0x0
numSamples = 1
sampleTimingArray[1] = {
{PTS = {253696201314000/1000000000 = 253696.201}, DTS = {INVALID}, duration = {INVALID}},
}
dataBuffer = 0x0
Any idea why CMSampleBufferSetDataBufferFromAudioBufferList
always returns this error?