Can't send audio buffer data using GCDAsyncUdpSocket from AVAudioEngine on iOS 17.2 in Swift


When I send data from a phone on iOS 17.3 and listen on the receiving socket, no data arrives, but when I send from a phone on iOS 15.3 the data is received. I suspect the problem is GCDAsyncUdpSocket on 17.3, but changing which GCDAsyncUdpSocket methods I use hasn't helped.
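The listening side used for testing looks roughly like the sketch below (the question doesn't include it, so the bound port and the multicast join are assumptions chosen to match the sender code further down):

```swift
import Foundation
import CocoaAsyncSocket

// Minimal listener used to check whether anything arrives on the wire.
final class UdpReceiver: NSObject, GCDAsyncUdpSocketDelegate {
    private var socket: GCDAsyncUdpSocket!

    func start() {
        socket = GCDAsyncUdpSocket(delegate: self, delegateQueue: .main)
        do {
            try socket.bind(toPort: 3434)
            // The sender targets 232.10.10.100, a multicast address,
            // so the receiver has to join that group to see the packets.
            try socket.joinMulticastGroup("232.10.10.100")
            try socket.beginReceiving()
            print("Listening on 3434")
        } catch {
            print("Receiver setup error: \(error)")
        }
    }

    func udpSocket(_ sock: GCDAsyncUdpSocket,
                   didReceive data: Data,
                   fromAddress address: Data,
                   withFilterContext filterContext: Any?) {
        print("Received \(data.count) bytes")
    }
}
```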

func setupAudio() {
    self.audioEngine = AVAudioEngine()
    self.audioPlayer = AVAudioPlayerNode()
    self.mixer = AVAudioMixerNode()
    self.audioEngine.attach(audioPlayer)
    self.mixer.volume = 0
    self.audioEngine.attach(mixer)

    // UDP socket used to stream the captured audio
    self.socket = GCDAsyncUdpSocket(delegate: self, delegateQueue: DispatchQueue.main)
}

func setupAudioEngine() {

    let audioEngine = AVAudioEngine()

    let inputNode = audioEngine.inputNode
    let inputFormat = inputNode.inputFormat(forBus: 0)

    // The hardware sample rate differs between devices (e.g. 44.1 kHz vs 48 kHz).
    let hardwareSampleRate = inputFormat.sampleRate
    print(inputFormat.settings)
    print(inputFormat.formatDescription)

    let outputFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                     sampleRate: hardwareSampleRate,
                                     channels: 1,
                                     interleaved: false)!

    let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!

    DispatchQueue.global(qos: .background).async { [weak self] in
        guard let self = self else { return }
        do {
            self.socket.setIPv4Enabled(true)
            self.socket.setIPv6Enabled(false)
            try self.socket.connect(toHost: "232.10.10.100", onPort: 3434)
            try self.socket.beginReceiving()
            print("Socket started")
        } catch {
            print("Socket start error: \(error)")
        }
    }

    inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { buffer, _ in
        let inputBlock: AVAudioConverterInputBlock = { _, outStatus in
            outStatus.pointee = .haveData
            return buffer
        }
        // Scale the output capacity by the ratio of the two sample rates.
        let frameCapacity = AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength
            / AVAudioFrameCount(buffer.format.sampleRate)
        let outputBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat,
                                            frameCapacity: frameCapacity)!
        var error: NSError?
        converter.convert(to: outputBuffer, error: &error, withInputFrom: inputBlock)
        if let error = error {
            print("Conversion error: \(error)")
            return
        }

        let data = Data(buffer: UnsafeBufferPointer(start: outputBuffer.int16ChannelData![0],
                                                    count: Int(outputBuffer.frameLength)))
        print(data)

        DispatchQueue.global(qos: .background).async { [weak self] in
            // send(_:withTimeout:tag:) does not throw; failures are reported
            // via the delegate's didNotSendDataWithTag callback.
            self?.socket.send(data, withTimeout: 0, tag: 0)
        }
    }

    audioEngine.prepare()
    do {
        try audioEngine.start()
        print("Audio engine started")
    } catch {
        print("Can't start the engine: \(error)")
    }
}

1 Answer

Rob Napier

Most likely the format is different. The most common mistake here is the sample rate, which is in some cases 44.1kHz and in other cases 48kHz (there are some other options, down to 8kHz, but those are the most common). Generally this is due to differences in the hardware rather than the software. I'm betting your 15.3 phone is an older phone with a microphone sample rate of 44.1kHz, and your 17.3 phone is a newer phone with a 48kHz microphone.
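If the sample rates really do differ, one way to make both phones behave the same is to convert to a single fixed wire format on the sending side instead of reusing the hardware rate. A minimal sketch, assuming a 44.1 kHz / Int16 / mono target and a `send` closure wrapping the UDP socket (both are placeholders, not anything from the question):

```swift
import AVFoundation

func installFixedRateTap(on engine: AVAudioEngine,
                         send: @escaping (Data) -> Void) {
    let inputNode = engine.inputNode
    let inputFormat = inputNode.inputFormat(forBus: 0)

    // One fixed wire format (assumed: 44.1 kHz, Int16, mono), so older
    // (44.1 kHz) and newer (48 kHz) phones produce the same byte stream.
    let wireFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                   sampleRate: 44_100,
                                   channels: 1,
                                   interleaved: false)!
    let converter = AVAudioConverter(from: inputFormat, to: wireFormat)!

    inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { buffer, _ in
        // Output capacity scaled by the ratio of the sample rates.
        let capacity = AVAudioFrameCount(Double(buffer.frameLength)
            * wireFormat.sampleRate / inputFormat.sampleRate)
        guard let converted = AVAudioPCMBuffer(pcmFormat: wireFormat,
                                               frameCapacity: capacity) else { return }

        // Feed the tap buffer exactly once per convert call.
        var consumed = false
        var error: NSError?
        converter.convert(to: converted, error: &error) { _, outStatus in
            if consumed {
                outStatus.pointee = .noDataNow
                return nil
            }
            consumed = true
            outStatus.pointee = .haveData
            return buffer
        }
        guard error == nil, let samples = converted.int16ChannelData else { return }

        let data = Data(buffer: UnsafeBufferPointer(start: samples[0],
                                                    count: Int(converted.frameLength)))
        send(data)   // e.g. socket.send(data, withTimeout: 0, tag: 0)
    }
}
```

The key point is that both ends agree on the stream format out of band rather than deriving it from whatever microphone hardware they happen to run on.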