001/*******************************************************************************
002 * Copyright (c) 2024, 2026, Olivier Ayache.  All rights reserved.
003 *
004 * This file is part of AVPKit.
005 *
006 * AVPKit is free software: you can redistribute it and/or modify
007 * it under the terms of the GNU Lesser General Public License as published by
008 * the Free Software Foundation, either version 3 of the License, or
009 * (at your option) any later version.
010 *
011 * AVPKit is distributed in the hope that it will be useful,
012 * but WITHOUT ANY WARRANTY; without even the implied warranty of
013 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
014 * GNU Lesser General Public License for more details.
015 *
016 * You should have received a copy of the GNU Lesser General Public License
017 * along with AVPKit.  If not, see <http://www.gnu.org/licenses/>.
018 *******************************************************************************/
019
020package com.avpkit.core.demos;
021
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;

import com.avpkit.core.Global;
import com.avpkit.core.ICodec;
import com.avpkit.core.IContainer;
import com.avpkit.core.IPacket;
import com.avpkit.core.IPixelFormat;
import com.avpkit.core.IStream;
import com.avpkit.core.IStreamCoder;
import com.avpkit.core.IVideoPicture;
import com.avpkit.core.IVideoResampler;
import com.avpkit.core.Utils;
036
037/**
038 * Takes a media container, finds the first video stream, decodes that
039 * stream, and then writes video frames at some interval based on the
040 * video presentation time stamps.
041 *
042 * @author trebor
043 */
044
045public class DecodeAndCaptureFrames
046{
047  /** The number of seconds between frames. */
048
049  public static final double SECONDS_BETWEEN_FRAMES = 5;
050
051  /** The number of nano-seconds between frames. */
052
053  public static final long NANO_SECONDS_BETWEEN_FRAMES = 
054    (long)(Global.DEFAULT_PTS_PER_SECOND * SECONDS_BETWEEN_FRAMES);
055  
056  /** Time of last frame write. */
057  
058  private static long mLastPtsWrite = Global.NO_PTS;
059
060  /** Write the video frame out to a PNG file every once and a while.
061   * The files are written out to the system's temporary directory.
062   *
063   * @param picture the video frame which contains the time stamp.
064   * @param image the buffered image to write out
065   */
066
067  private static void processFrame(IVideoPicture picture, BufferedImage image)
068  {
069    try
070    {
071      // if uninitialized, backdate mLastPtsWrite so we get the very
072      // first frame
073
074      if (mLastPtsWrite == Global.NO_PTS)
075        mLastPtsWrite = picture.getPts() - NANO_SECONDS_BETWEEN_FRAMES;
076
077      // if it's time to write the next frame
078
079      if (picture.getPts() - mLastPtsWrite >= NANO_SECONDS_BETWEEN_FRAMES)
080      {
081        // Make a temorary file name
082
083        File file = File.createTempFile("frame", ".png");
084
085        // write out PNG
086
087        ImageIO.write(image, "png", file);
088
089        // indicate file written
090
091        double seconds = ((double)picture.getPts()) / Global.DEFAULT_PTS_PER_SECOND;
092        System.out.printf("at elapsed time of %6.3f seconds wrote: %s\n",
093          seconds, file);
094        
095        // update last write time
096        
097        mLastPtsWrite += NANO_SECONDS_BETWEEN_FRAMES;
098      }
099    }
100    catch (Exception e)
101    {
102      e.printStackTrace();
103    }
104  }
105
106  /**
107   * Takes a media container (file) as the first argument, opens it,
108   * reads through the file and captures video frames periodically as
109   * specified by SECONDS_BETWEEN_FRAMES.  The frames are written as PNG
110   * files into the system's temporary directory.
111   *  
112   * @param args must contain one string which represents a filename
113   */
114
115  @SuppressWarnings("deprecation")
116  public static void main(String[] args)
117  {
118    if (args.length <= 0)
119      throw new IllegalArgumentException(
120        "must pass in a filename as the first argument");
121
122    String filename = args[0];
123
124    // make sure that we can actually convert video pixel formats
125
126    if (!IVideoResampler.isSupported(
127        IVideoResampler.Feature.FEATURE_COLORSPACECONVERSION))
128      throw new RuntimeException(
129        "you must install the GPL version of AVPKit (with IVideoResampler" + 
130        " support) for this demo to work");
131
132    // create a AVPKit container object
133
134    IContainer container = IContainer.make();
135
136    // open up the container
137
138    if (container.open(filename, IContainer.Type.READ, null) < 0)
139      throw new IllegalArgumentException("could not open file: " + filename);
140
141    // query how many streams the call to open found
142
143    int numStreams = container.getNumStreams();
144
145    // and iterate through the streams to find the first video stream
146
147    int videoStreamId = -1;
148    IStreamCoder videoCoder = null;
149    for(int i = 0; i < numStreams; i++)
150    {
151      // find the stream object
152
153      IStream stream = container.getStream(i);
154
155      // get the pre-configured decoder that can decode this stream;
156
157      IStreamCoder coder = stream.getStreamCoder();
158
159      if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO)
160      {
161        videoStreamId = i;
162        videoCoder = coder;
163        break;
164      }
165    }
166
167    if (videoStreamId == -1)
168      throw new RuntimeException("could not find video stream in container: "+filename);
169
170    // Now we have found the video stream in this file.  Let's open up
171    // our decoder so it can do work
172
173    if (videoCoder.open() < 0)
174      throw new RuntimeException(
175        "could not open video decoder for container: " + filename);
176
177    IVideoResampler resampler = null;
178    if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24)
179    {
180      // if this stream is not in BGR24, we're going to need to
181      // convert it.  The VideoResampler does that for us.
182
183      resampler = IVideoResampler.make(
184        videoCoder.getWidth(), videoCoder.getHeight(), IPixelFormat.Type.BGR24,
185        videoCoder.getWidth(), videoCoder.getHeight(), videoCoder.getPixelType());
186      if (resampler == null)
187        throw new RuntimeException(
188          "could not create color space resampler for: " + filename);
189    }
190
191    // Now, we start walking through the container looking at each packet.
192
193    IPacket packet = IPacket.make();
194    while(container.readNextPacket(packet) >= 0)
195    {
196      
197      // Now we have a packet, let's see if it belongs to our video strea
198
199      if (packet.getStreamIndex() == videoStreamId)
200      {
201        // We allocate a new picture to get the data out of AVPKit
202
203        IVideoPicture picture = IVideoPicture.make(videoCoder.getPixelType(),
204            videoCoder.getWidth(), videoCoder.getHeight());
205
206        int offset = 0;
207        while(offset < packet.getSize())
208        {
209          // Now, we decode the video, checking for any errors.
210
211          int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
212          if (bytesDecoded < 0)
213            throw new RuntimeException("got error decoding video in: " + filename);
214          offset += bytesDecoded;
215          
216          // Some decoders will consume data in a packet, but will not
217          // be able to construct a full video picture yet.  Therefore
218          // you should always check if you got a complete picture from
219          // the decode.
220
221          if (picture.isComplete())
222          {
223            IVideoPicture newPic = picture;
224            
225            // If the resampler is not null, it means we didn't get the
226            // video in BGR24 format and need to convert it into BGR24
227            // format.
228
229            if (resampler != null)
230            {
231              // we must resample
232              newPic = IVideoPicture.make(
233                resampler.getOutputPixelFormat(), picture.getWidth(), 
234                picture.getHeight());
235              if (resampler.resample(newPic, picture) < 0)
236                throw new RuntimeException(
237                  "could not resample video from: " + filename);
238            }
239
240            if (newPic.getPixelType() != IPixelFormat.Type.BGR24)
241              throw new RuntimeException(
242                "could not decode video as BGR 24 bit data in: " + filename);
243
244            // convert the BGR24 to an Java buffered image
245
246            BufferedImage javaImage = Utils.videoPictureToImage(newPic);
247
248            // process the video frame
249
250            processFrame(newPic, javaImage);
251          }
252        }
253      }
254      else
255      {
256        // This packet isn't part of our video stream, so we just
257        // silently drop it.
258        do {} while(false);
259      }
260    }
261
262    // Technically since we're exiting anyway, these will be cleaned up
263    // by the garbage collector... but because we're nice people and
264    // want to be invited places for Christmas, we're going to show how
265    // to clean up.
266
267    if (videoCoder != null)
268    {
269      videoCoder.close();
270      videoCoder = null;
271    }
272    if (container !=null)
273    {
274      container.close();
275      container = null;
276    }
277  }
278}