I am using the navigator.mediaDevices.getUserMedia to access my personal microphone and web camera. I am able to access my images through my webcamera and display it on my html page. however, i am not able to simulatenously play the audio.
I tried to first play the audio with the stream variable being set for the video srcObject. This worked great. However, when I changed the srcObject for my video to the imagebitmap, my code has not played the audio simultaneously. I am getting an error in the console:Uncaught (in promise) DOMException: Failed to load because no supported source was found.
In this instance, you can ignore my jsonresponse as it is not doing anything other than returning a phrase. I'm first trying to access the audio chunks and imagebitmap properly. Here's the code:
{% extends "auctions/layout.html" %}
{% block body %}
<style>
#localVideo {
border-radius: 25px;
}
</style>
<center><video id="processedVideo" autoplay playsinline></video></center>
<script src="https://cdnjs.cloudflare.com/ajax/libs/simple-peer/9.11.1/simplepeer.min.js"></script>
<script>
const constraints = {
video: true,
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
// Set the desired bitrate (e.g., 128 kbps)
deviceId: {
exact: "default"
}
},
};
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
.then(stream => {
//document.getElementById('processedVideo').srcObject = stream;
const videoTrack = stream.getVideoTracks()[0];
//const audioTrack = stream.getAudioTracks()[0];
const imageCapture = new ImageCapture(videoTrack);
const mediaRecorder = new MediaRecorder(stream);
// Function to handle rendering imageBitmap into srcObject
function renderImage(imageBitmap) {
console.log('Received video frame:', imageBitmap);
const canvas = document.createElement('canvas');
canvas.width = imageBitmap.width;
canvas.height = imageBitmap.height;
const ctx = canvas.getContext('2d');
ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height);
const newStream = canvas.captureStream();
document.getElementById('processedVideo').srcObject = newStream;
}
// Create a single Audio element for playback
console.log(mediaRecorder);
// Example: Grabbing video frames at intervals
setInterval(() => {
imageCapture.grabFrame()
.then(imageBitmap => {
renderImage(imageBitmap);
fetch('/image', {
method: 'PUT',
headers: { "Content-type": "application/json", "X-CSRFtoken": "{{ csrf_token }}" },
body: JSON.stringify({ image: imageBitmap })
})
.then(response => response.json())
.then(result => {
console.log(result);
});
})
.catch(error => console.error('Error grabbing frame:', error));
}, 200); // Adjust the interval as needed
// Start recording audio
mediaRecorder.ondataavailable = event => {
// Play audio chunk directly from MediaStream
var blobUrl = window.URL.createObjectURL(event.data);
console.log('Received audio chunk:', blobUrl);
const audioElement = new Audio(blobUrl); // Create Audio element
audioElement.play();
};
mediaRecorder.start(1000); // Adjust the interval as needed
})
.catch(error => console.error('Error accessing media devices:', error));
</script>
{% endblock %}