David Callanan David Callanan - 2 months ago 8x
Javascript Question

How to convert array of png image data into video file

I am getting frames from a canvas element, using the code snippet shown below.


However, now I have an array of PNG images, but I want a video file.

How do I do this?

Thank you, people of the internet.

// Grab the capture canvas and snapshot it as a PNG data-URL once per second.
var canvas = document.getElementById("mycanvaselementforvideocapturing");
var pngimages = [];
var captureFrame = function () {
  pngimages.push(canvas.toDataURL());
};
setInterval(captureFrame, 1000);

Thanks a million!


For a full browser support way, you'll have to send your image batch to the server then use some server-side program to do the encoding.

FFmpeg might be able to do it.

But in the newest browsers the canvas.captureStream method has been implemented. It will convert your canvas drawings to a webm video stream, recordable with a MediaRecorder. All of this is still not stabilized though, and will only be available in the latest versions of browsers, probably with some flags set in the user's preferences (e.g. Chrome needs the "Experimental Web Platform features" one).

Also, browsers have different implementations of the MediaRecorder API. For example, Chrome will send chunks of the recorded media periodically, through the ondataavailable event, while Firefox will only fire this event when the recorder has been stopped, exposing the full blob in the evt.data property.
Unfortunately, since FF doesn't send the chunks, we can't have a single method to handle both implementations, and we have to check the state property of the MediaRecorder.

// Recorder state shared by the handlers below.
var cStream;
var recorder;
var savedChunks = [];

// First click: start recording the canvas stream, then rebind the button
// so the next click stops the recording.
rec.onclick = function() {
  this.textContent = 'stop recording';

  // set the framerate to 30FPS
  // (no `var` here: assign the outer cStream instead of shadowing it)
  cStream = canvas.captureStream(30);
  // create a recorder fed with our canvas' stream
  recorder = new MediaRecorder(cStream);
  // start it — without this call no dataavailable event ever fires
  recorder.start();
  // what to do when it finishes for FF and what to do with each chunk for chrome
  recorder.ondataavailable = saveStream;
  // change our button's function
  this.onclick = stopRecording;
};

// fires every x time in chrome 
// and only when the recorder stopped in FF
// fires every x time in chrome
// and only when the recorder stopped in FF
function saveStream(e) {

  if (e.data.size > 0 && this.state === 'recording') {
    // chrome chunks: accumulate each non-empty piece as it arrives
    savedChunks.push(e.data);
  } else if (this.state === 'inactive' && !savedChunks.length) {
    // FF end event: the single event after stop() carries the full blob
    exportStream(e.data);
  }
}

// Stop the recorder; in chrome we already hold the chunks, in FF the
// resulting dataavailable event (handled by saveStream) does the export.
function stopRecording() {
  recorder.stop();
  // this might be chrome: stitch the collected chunks into one blob
  if (savedChunks.length) {
    exportStream(new Blob(savedChunks));
  }
}

// Wrap the recorded blob in a playable <video> element placed before the canvas.
function exportStream(blob) {

  var vidURL = URL.createObjectURL(blob);
  var vid = document.createElement('video');
  vid.controls = true;
  vid.src = vidURL;
  // 'ended' is the real media event ('onend' does not exist);
  // revoke the object URL once playback finishes so the blob can be freed
  vid.onended = function() {
    URL.revokeObjectURL(vidURL);
  };
  document.body.insertBefore(vid, canvas);
}

// make something move on the canvas so there is something to record
var x = 0;
var ctx = canvas.getContext('2d');

var anim = function() {
  x = (x + 2) % (canvas.width + 100);
  // there is no transparency in webm,
  // so we need to set a background otherwise every transparent pixel will become opaque black
  ctx.fillStyle = 'ivory';
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.fillStyle = 'black';
  ctx.fillRect(x - 50, 20, 50, 50);
  // reschedule ourselves: the animation (and the captured stream) runs
  // one step per display frame
  requestAnimationFrame(anim);
};
anim();
<canvas id="canvas" width="500" height="200"></canvas>
<button id="rec">record</button>

And since you asked for a way to add audio to this video, note that you can use cStream.addTrack(anAudioStream.getAudioTracks()[0]); (addTrack, since a single MediaStreamTrack is being added) before calling new MediaRecorder(cStream), but this will currently only work in Chrome, and only for audio tracks coming from a getUserMedia call; I wasn't able to use one from createMediaStreamDestination, and FF seems to have a bug in MediaRecorder which makes it record only the stream with the tracks it was defined with...