Currently the code below can stream local mp3 files, so if I call
audio.scheduleFile(NSBundle.mainBundle().URLForResource("Moon River", withExtension: "mp3")!)
it will properly play the local file. Now I want to be able to stream non-local URLs.
What do I need to do in order to stream mp3 URLs?
class Audio: NSObject {

    var graph: AUGraph
    var filePlayerAU: AudioUnit
    var filePlayerNode: AUNode
    var outputAU: AudioUnit
    var fileID: AudioFileID
    var currentFrame: Int64

    override init() {
        graph = AUGraph()
        filePlayerAU = AudioUnit()
        filePlayerNode = AUNode()
        outputAU = AudioUnit()
        fileID = AudioFileID()
        currentFrame = 0

        super.init()

        NewAUGraph(&graph)

        // Add file player node
        var cd = AudioComponentDescription(componentType: OSType(kAudioUnitType_Generator),
                                           componentSubType: OSType(kAudioUnitSubType_AudioFilePlayer),
                                           componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
                                           componentFlags: 0, componentFlagsMask: 0)
        AUGraphAddNode(graph, &cd, &filePlayerNode)

        // Add output node
        var outputNode = AUNode()
        cd.componentType = OSType(kAudioUnitType_Output)
        cd.componentSubType = OSType(kAudioUnitSubType_RemoteIO)
        AUGraphAddNode(graph, &cd, &outputNode)

        // Graph must be opened before we can get node info!
        AUGraphOpen(graph)
        AUGraphNodeInfo(graph, filePlayerNode, nil, &filePlayerAU)
        AUGraphNodeInfo(graph, outputNode, nil, &outputAU)

        AUGraphConnectNodeInput(graph, filePlayerNode, 0, outputNode, 0)
        AUGraphInitialize(graph)

        registerCallbackForAU(filePlayerAU, nil)
    }

    func scheduleFile(url: NSURL) {
        AudioFileOpenURL(url, 1, 0, &fileID)

        // Step 1: schedule the file(s)
        // kAudioUnitProperty_ScheduledFileIDs takes an array of AudioFileIDs
        var filesToSchedule = [fileID]
        AudioUnitSetProperty(filePlayerAU,
                             AudioUnitPropertyID(kAudioUnitProperty_ScheduledFileIDs),
                             AudioUnitScope(kAudioUnitScope_Global), 0, filesToSchedule,
                             UInt32(sizeof(AudioFileID)))
    }

    func scheduleRegion() {
        // Step 2: Schedule the regions of the file(s) to play
        // Swift forces us to fill out the structs completely, even if they are not used
        let smpteTime = SMPTETime(mSubframes: 0, mSubframeDivisor: 0,
                                  mCounter: 0, mType: 0, mFlags: 0,
                                  mHours: 0, mMinutes: 0, mSeconds: 0, mFrames: 0)
        var timeStamp = AudioTimeStamp(mSampleTime: 0, mHostTime: 0, mRateScalar: 0,
                                       mWordClockTime: 0, mSMPTETime: smpteTime,
                                       mFlags: UInt32(kAudioTimeStampSampleTimeValid), mReserved: 0)
        var region = ScheduledAudioFileRegion()
        region.mTimeStamp = timeStamp
        region.mCompletionProc = nil
        region.mCompletionProcUserData = nil
        region.mAudioFile = fileID
        region.mLoopCount = 0
        region.mStartFrame = currentFrame
        region.mFramesToPlay = UInt32.max

        AudioUnitSetProperty(filePlayerAU,
                             AudioUnitPropertyID(kAudioUnitProperty_ScheduledFileRegion),
                             AudioUnitScope(kAudioUnitScope_Global), 0, &region,
                             UInt32(sizeof(ScheduledAudioFileRegion)))

        // Step 3: Prime the file player
        var primeFrames: UInt32 = 0
        AudioUnitSetProperty(filePlayerAU,
                             AudioUnitPropertyID(kAudioUnitProperty_ScheduledFilePrime),
                             AudioUnitScope(kAudioUnitScope_Global), 0, &primeFrames,
                             UInt32(sizeof(UInt32)))

        // Step 4: Schedule the start time (-1 = now)
        timeStamp.mSampleTime = -1
        AudioUnitSetProperty(filePlayerAU,
                             AudioUnitPropertyID(kAudioUnitProperty_ScheduleStartTimeStamp),
                             AudioUnitScope(kAudioUnitScope_Global), 0, &timeStamp,
                             UInt32(sizeof(AudioTimeStamp)))
    }
}
If you are looking to play remote files (mp3) from a server, I would look into using AVPlayer:
https://developer.apple.com/documentation/avfoundation/avplayer
Here is a snippet that might nudge you in the right direction. Of course, this is just a very basic example.
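It is only a minimal sketch, written in the same Swift era as your code (NSURL etc.); the class name RemoteAudio and the play(_:) method are just placeholders I made up for the example. The main points are handing AVPlayer the remote URL and keeping a strong reference to it:

import AVFoundation

class RemoteAudio {
    // Keep a strong reference; a locally scoped AVPlayer would be
    // deallocated and playback would stop as soon as the method returns
    var player: AVPlayer?

    func play(urlString: String) {
        // NSURL(string:) is failable and returns nil for malformed URLs
        if let url = NSURL(string: urlString) {
            // AVPlayer handles the buffering/streaming of the remote mp3 for you,
            // so none of the AUGraph/AudioFileID setup above is needed
            player = AVPlayer(URL: url)
            player?.play()
        }
    }
}

Usage would look something like this (the URL is just an example):

let audio = RemoteAudio()
audio.play("https://example.com/some.mp3")

One thing to keep in mind: if the file is served over plain http rather than https, you may also need an App Transport Security exception in your Info.plist on iOS 9 and later.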