In web development, we often need to work with media files, including video files. Sometimes, we may need to extract specific frames from a video and display them on a web page. In this article, we will describe how to do this using JavaScript.

 

 1. Listening for file selection


First, we need to place a file selection box on the page so that the user can select a video file. By listening to the change event of the file selection box, we can get the video file that the user selected.

// When the user picks a video file, capture its first frame (t = 0 s).
document.querySelector("input").onchange = (e) => {
    const videoFile = e.target.files[0];
    // The change event can fire with an empty file list (e.g. the user
    // cleared the selection); guard before handing off to captureFrame.
    if (!videoFile) return;
    captureFrame(videoFile, 0);
};

 2. Capturing video frames


Next, we define a function called captureFrame, which is used to capture video frames. In this function, we create a video element and set its attributes, including the current playback time, autoplay, and muting (because of browser autoplay policies, some browsers do not allow videos to autoplay unless they are muted). When the video is ready to play, we call the drawImage function to draw the video frame onto a canvas and display the result on the page.

/**
 * Capture the frame of a video file at the given timestamp and display
 * it in the page's <img> element.
 *
 * @param {File|Blob} videoFile - Video file selected by the user.
 * @param {number} time - Target timestamp of the frame, in seconds.
 */
function captureFrame(videoFile, time) {
    const video = document.createElement("video");
    video.currentTime = time;
    // Browser autoplay policies only allow muted videos to autoplay.
    video.muted = true;
    video.autoplay = true;
    const videoUrl = URL.createObjectURL(videoFile);
    video.oncanplay = async () => {
        try {
            if (time > video.duration) {
                throw new Error(
                    `Requested time ${time}s exceeds video duration ${video.duration}s`
                );
            }
            const { url } = await drawImage(video);
            const img = document.querySelector("img");
            img.src = url;
        } finally {
            // Release the temporary object URL for the video file so it
            // is not leaked; the captured frame's own URL stays valid.
            URL.revokeObjectURL(videoUrl);
        }
    };
    video.src = videoUrl;
}

 3. Drawing video frames


The drawImage function is used to draw the video frame onto a canvas and convert the drawing result into a Blob object. First, we create a canvas element and set its width and height to the same size as the video. Then, we get the 2D drawing context through the canvas's getContext method and call the drawImage method to draw the video frame onto the canvas. Finally, the drawing result is converted to a Blob object via the canvas's toBlob method, and a Promise containing the Blob object and its URL is returned.

/**
 * Draw the current frame of a video element onto an off-screen canvas
 * and encode it as an image Blob.
 *
 * @param {HTMLVideoElement} video - Video positioned at the desired frame.
 * @returns {Promise<{blob: Blob, url: string}>} The encoded frame and an
 *   object URL pointing at it (revoke the URL when no longer needed).
 */
function drawImage(video) {
    return new Promise((resolve, reject) => {
        const canvas = document.createElement("canvas");
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
        const ctx = canvas.getContext("2d");
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        canvas.toBlob((blob) => {
            // toBlob invokes its callback with null when encoding fails
            // (e.g. a 0x0 canvas); reject instead of passing null on.
            if (blob === null) {
                reject(new Error("Canvas could not be encoded to a Blob"));
                return;
            }
            resolve({
                blob,
                url: URL.createObjectURL(blob),
            });
        });
    });
}

 4. The resulting effect


With the above steps, we have implemented the ability to capture video frames and display them on the web page. After the user selects a video file, the page automatically extracts the video frame and displays it in the designated location. This opens up more possibilities for web development, such as creating video previews, video clips, and similar features.


In practice, we can extend and optimize this code to meet different needs. JavaScript provides us with a rich set of APIs; by using them flexibly, we can implement a wide variety of features and provide users with a better experience.

 

<!--
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>2</title>
  </head>
  <body>
    <div>
      <input type="file" />
    </div>
    <div style="margin: 10px">
      <img style="width: 100%" src="" alt="" />
    </div>
    <script>
      // Single change handler. (The original example assigned onchange
      // twice, so the first handler, using time = 10, was silently
      // overwritten by the second.)
      document.querySelector("input").onchange = (e) => {
        const videoFile = e.target.files[0];
        if (!videoFile) return;
        captureFrame(videoFile, 0);
      };

      // Draw the video's current frame onto a canvas and encode it as a
      // Blob; resolves with the Blob and an object URL for it.
      function drawImage(video) {
        return new Promise((resolve, reject) => {
          const canvas = document.createElement("canvas");
          canvas.width = video.videoWidth;
          canvas.height = video.videoHeight;
          const ctx = canvas.getContext("2d");
          ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
          canvas.toBlob((blob) => {
            // toBlob passes null when encoding fails; reject in that case.
            if (blob === null) {
              reject(new Error("Canvas could not be encoded to a Blob"));
              return;
            }
            resolve({
              blob,
              url: URL.createObjectURL(blob),
            });
          });
        });
      }

      // Seek the video to `time` seconds and show that frame in <img>.
      function captureFrame(videoFile, time) {
        const video = document.createElement("video");
        video.currentTime = time;
        // Browser autoplay policies only allow muted videos to autoplay.
        video.muted = true;
        video.autoplay = true;
        video.oncanplay = async () => {
          console.log("视频时长:", video.duration);
          if (time > video.duration) {
            throw new Error(
              `Requested time ${time}s exceeds video duration ${video.duration}s`
            );
          }
          const { url } = await drawImage(video);
          const img = document.querySelector("img");
          img.src = url;
        };
        video.src = URL.createObjectURL(videoFile);
      }
    </script>
  </body>
</html>
-->

By lzz

Leave a Reply

Your email address will not be published. Required fields are marked *