001/* ----------------------------------------------------------------------------
002 * This file was automatically generated by SWIG (http://www.swig.org).
003 * Version 4.0.2
004 *
005 * Do not make changes to this file unless you know what you are doing--modify
006 * the SWIG interface file instead.
007 * ----------------------------------------------------------------------------- */
008
009package com.avpkit.core;
010import com.avpkit.ferry.*;
011/**
012 * Information about how video data is formatted in an {IVideoPicture} object.<br>
013 * <br>
014 * This specifies the color space and how many bits pixel data takes.  It also<br>
015 * includes some utility methods for dealing with {Type#YUV420P} data; the<br>
016 * most common type of encoding used in video files I've run across.
017 */
018public class IPixelFormat extends RefCounted {
019  // JNIHelper.swg: Start generated code
020  // >>>>>>>>>>>>>>>>>>>>>>>>>>>
021  /**
022   * This method is only here to use some references and remove
023   * a Eclipse compiler warning.
024   */
025  @SuppressWarnings("unused")
026  private void noop()
027  {
028    IBuffer.make(null, 1);
029  }
030   
  // Raw address of the wrapped native object; 0 once the native side has been
  // deleted (see the error message thrown by getMyCPtr). Volatile so a clear
  // performed on one thread is visible to readers on other threads.
  private volatile long swigCPtr;
032
033  /**
034   * Internal Only.
035   */
036  protected IPixelFormat(long cPtr, boolean cMemoryOwn) {
037    super(AVPKitJNI.IPixelFormat_SWIGUpcast(cPtr), cMemoryOwn);
038    swigCPtr = cPtr;
039  }
040  
041  /**
042   * Internal Only.
043   */
044  protected IPixelFormat(long cPtr, boolean cMemoryOwn,
045      java.util.concurrent.atomic.AtomicLong ref)
046  {
047    super(AVPKitJNI.IPixelFormat_SWIGUpcast(cPtr),
048     cMemoryOwn, ref);
049    swigCPtr = cPtr;
050  }
051    
052  /**
053   * Internal Only.  Not part of public API.
054   *
055   * Get the raw value of the native object that obj is proxying for.
056   *   
057   * @param obj The java proxy object for a native object.
058   * @return The raw pointer obj is proxying for.
059   */
060  public static long getCPtr(IPixelFormat obj) {
061    if (obj == null) return 0;
062    return obj.getMyCPtr();
063  }
064
065  /**
066   * Internal Only.  Not part of public API.
067   *
068   * Get the raw value of the native object that we're proxying for.
069   *   
070   * @return The raw pointer we're proxying for.
071   */  
072  public long getMyCPtr() {
073    if (swigCPtr == 0) throw new IllegalStateException("underlying native object already deleted");
074    return swigCPtr;
075  }
076  
077  /**
078   * Create a new IPixelFormat object that is actually referring to the
079   * exact same underlying native object.
080   *
081   * @return the new Java object.
082   */
083  @Override
084  public IPixelFormat copyReference() {
085    if (swigCPtr == 0)
086      return null;
087    else
088      return new IPixelFormat(swigCPtr, swigCMemOwn, getJavaRefCount());
089  }
090
091  /**
092   * Compares two values, returning true if the underlying objects in native code are the same object.
093   *
094   * That means you can have two different Java objects, but when you do a comparison, you'll find out
095   * they are the EXACT same object.
096   *
097   * @return True if the underlying native object is the same.  False otherwise.
098   */
099  public boolean equals(Object obj) {
100    boolean equal = false;
101    if (obj instanceof IPixelFormat)
102      equal = (((IPixelFormat)obj).swigCPtr == this.swigCPtr);
103    return equal;
104  }
105  
106  /**
107   * Get a hashable value for this object.
108   *
109   * @return the hashable value.
110   */
111  public int hashCode() {
112     return (int)swigCPtr;
113  }
114  
115  // <<<<<<<<<<<<<<<<<<<<<<<<<<<
116  // JNIHelper.swg: End generated code
117  
118
119
120  /**
121   * Returns the byte for the coordinates at x and y for the color component c.<br>
122   * <br>
123   * @param frame The frame to get the byte from<br>
124   * @param x X coordinate in pixels, where 0 is the left hand edge of the image. <br>
125   * @param y Y coordinate in pixels, where 0 is the top edge of the image. <br>
126   * @param c YUVColor component<br>
127   * <br>
128   * @throws std::exception frame is null, the coordinates are invalid, or if the pixel format is not YUV420P<br>
129   * <br>
130   * @return the pixel byte for that x, y, c combination 
131   */
132  public static short getYUV420PPixel(IVideoPicture frame, int x, int y, IPixelFormat.YUVColorComponent c) {
133    return AVPKitJNI.IPixelFormat_getYUV420PPixel(IVideoPicture.getCPtr(frame), frame, x, y, c.swigValue());
134  }
135
136  /**
137   * Sets the value of the color component c at the coordinates x and y in the given frame.<br>
138   * <br>
139   * @param frame The frame to set the byte in<br>
140   * @param x X coordinate in pixels, where 0 is the left hand edge of the image. <br>
141   * @param y Y coordinate in pixels, where 0 is the top edge of the image. <br>
142   * @param c YUVColor component to set<br>
143   * @param value The new value of that pixel color component<br>
144   * <br>
145   * @throws std::exception frame is null, the coordinates are invalid, or if the pixel format is not YUV420P 
146   */
147  public static void setYUV420PPixel(IVideoPicture frame, int x, int y, IPixelFormat.YUVColorComponent c, short value) {
148    AVPKitJNI.IPixelFormat_setYUV420PPixel(IVideoPicture.getCPtr(frame), frame, x, y, c.swigValue(), value);
149  }
150
151  /**
152   * For a given x and y in a frame, and a given color components, this method<br>
153   * tells you how far into the actual data you'd have to go to find the byte that<br>
154   * represents that color/coordinate combination.<br>
155   * <br>
156   * @param frame The frame to get the byte from<br>
157   * @param x X coordinate in pixels, where 0 is the left hand edge of the image. <br>
158   * @param y Y coordinate in pixels, where 0 is the top edge of the image. <br>
159   * @param c YUVColor component<br>
160   * <br>
161   * @throws std::exception frame is null, the coordinates are invalid, or if the pixel format is not YUV420P<br>
162   * <br>
163   * @return the offset in bytes, starting from the start of the frame data, where<br>
164   *   the data for this pixel resides.
165   */
166  public static int getYUV420PPixelOffset(IVideoPicture frame, int x, int y, IPixelFormat.YUVColorComponent c) {
167    return AVPKitJNI.IPixelFormat_getYUV420PPixelOffset(IVideoPicture.getCPtr(frame), frame, x, y, c.swigValue());
168  }
169
170  /**
171   * Pixel format. Notes:<br>
172   * <br>
173   * RGB32 is handled in an endian-specific manner. A RGBA<br>
174   * color is put together as:<br>
175   *  (A &lt;< 24) | (R &lt;< 16) | (G &lt;< 8) | B<br>
176   * This is stored as BGRA on little endian CPU architectures and ARGB on<br>
177   * big endian CPUs.<br>
178   * <br>
179   * When the pixel format is palettized RGB (PAL8), the palettized<br>
180   * image data is stored in AVFrame.data[0]. The palette is transported in<br>
181   * AVFrame.data[1] and, is 1024 bytes long (256 4-byte entries) and is<br>
182   * formatted the same as in RGB32 described above (i.e., it is<br>
183   * also endian-specific). Note also that the individual RGB palette<br>
184   * components stored in AVFrame.data[1] should be in the range 0..255.<br>
185   * This is important as many custom PAL8 video codecs that were designed<br>
186   * to run on the IBM VGA graphics adapter use 6-bit palette components.
187   */
188  public enum Type {
189    NONE(AVPKitJNI.IPixelFormat_NONE_get()),
190    /**
191     *  planar YUV 4:2:0, 12bpp, (1 Cr &amp; Cb sample per 2x2 Y samples)
192     */
193    YUV420P,
194    /**
195     *  packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
196     */
197    YUYV422,
198    /**
199     *  packed RGB 8:8:8, 24bpp, RGBRGB...
200     */
201    RGB24,
202    /**
203     *  packed RGB 8:8:8, 24bpp, BGRBGR...
204     */
205    BGR24,
206    /**
207     *  planar YUV 4:2:2, 16bpp, (1 Cr &amp; Cb sample per 2x1 Y samples)
208     */
209    YUV422P,
210    /**
211     *  planar YUV 4:4:4, 24bpp, (1 Cr &amp; Cb sample per 1x1 Y samples)
212     */
213    YUV444P,
214    /**
215     *  planar YUV 4:1:0,  9bpp, (1 Cr &amp; Cb sample per 4x4 Y samples)
216     */
217    YUV410P,
218    /**
219     *  planar YUV 4:1:1, 12bpp, (1 Cr &amp; Cb sample per 4x1 Y samples)
220     */
221    YUV411P,
222    /**
223     *         Y        ,  8bpp
224     */
225    GRAY8,
226    /**
227     *         Y        ,  1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
228     */
229    MONOWHITE,
230    /**
231     *         Y        ,  1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
232     */
233    MONOBLACK,
234    /**
235     *  8 bits with RGB32 palette
236     */
237    PAL8,
238    /**
239     *  planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of YUV420P and setting color_range
240     */
241    YUVJ420P,
242    /**
243     *  planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of YUV422P and setting color_range
244     */
245    YUVJ422P,
246    /**
247     *  planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of YUV444P and setting color_range
248     */
249    YUVJ444P,
250    /**
251     *  packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
252     */
253    UYVY422,
254    /**
255     *  packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
256     */
257    UYYVYY411,
258    /**
259     *  packed RGB 3:3:2,  8bpp, (msb)2B 3G 3R(lsb)
260     */
261    BGR8,
262    /**
263     *  packed RGB 1:2:1 bitstream,  4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
264     */
265    BGR4,
266    /**
267     *  packed RGB 1:2:1,  8bpp, (msb)1B 2G 1R(lsb)
268     */
269    BGR4_BYTE,
270    /**
271     *  packed RGB 3:3:2,  8bpp, (msb)2R 3G 3B(lsb)
272     */
273    RGB8,
274    /**
275     *  packed RGB 1:2:1 bitstream,  4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
276     */
277    RGB4,
278    /**
279     *  packed RGB 1:2:1,  8bpp, (msb)1R 2G 1B(lsb)
280     */
281    RGB4_BYTE,
282    /**
283     *  planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
284     */
285    NV12,
286    /**
287     *  as above, but U and V bytes are swapped
288     */
289    NV21,
290    /**
291     *  packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
292     */
293    ARGB,
294    /**
295     *  packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
296     */
297    RGBA,
298    /**
299     *  packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
300     */
301    ABGR,
302    /**
303     *  packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
304     */
305    BGRA,
306    /**
307     *         Y        , 16bpp, big-endian
308     */
309    GRAY16BE,
310    /**
311     *         Y        , 16bpp, little-endian
312     */
313    GRAY16LE,
314    /**
315     *  planar YUV 4:4:0 (1 Cr &amp; Cb sample per 1x2 Y samples)
316     */
317    YUV440P,
318    /**
319     *  planar YUV 4:4:0 full scale (JPEG), deprecated in favor of YUV440P and setting color_range
320     */
321    YUVJ440P,
322    /**
323     *  planar YUV 4:2:0, 20bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples)
324     */
325    YUVA420P,
326    /**
327     *  packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
328     */
329    RGB48BE,
330    /**
331     *  packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
332     */
333    RGB48LE,
334    /**
335     *  packed RGB 5:6:5, 16bpp, (msb)   5R 6G 5B(lsb), big-endian
336     */
337    RGB565BE,
338    /**
339     *  packed RGB 5:6:5, 16bpp, (msb)   5R 6G 5B(lsb), little-endian
340     */
341    RGB565LE,
342    /**
343     *  packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian   , X=unused/undefined
344     */
345    RGB555BE,
346    /**
347     *  packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
348     */
349    RGB555LE,
350    /**
351     *  packed BGR 5:6:5, 16bpp, (msb)   5B 6G 5R(lsb), big-endian
352     */
353    BGR565BE,
354    /**
355     *  packed BGR 5:6:5, 16bpp, (msb)   5B 6G 5R(lsb), little-endian
356     */
357    BGR565LE,
358    /**
359     *  packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian   , X=unused/undefined
360     */
361    BGR555BE,
362    /**
363     *  packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined
364     */
365    BGR555LE,
366    /**
367     * HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
368     */
369    VAAPI_MOCO,
370    /**
371     *  HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
372     */
373    VAAPI_IDCT,
374    /**
375     *  HW decoding through VA API, Picture.data[3] contains a VASurfaceID
376     */
377    VAAPI_VLD,
378    /**
379     * 
380     */
381    VAAPI(AVPKitJNI.IPixelFormat_VAAPI_get()),
382    /**
383     *  planar YUV 4:2:0, 24bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), little-endian
384     */
385    YUV420P16LE,
386    /**
387     *  planar YUV 4:2:0, 24bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), big-endian
388     */
389    YUV420P16BE,
390    /**
391     *  planar YUV 4:2:2, 32bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), little-endian
392     */
393    YUV422P16LE,
394    /**
395     *  planar YUV 4:2:2, 32bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), big-endian
396     */
397    YUV422P16BE,
398    /**
399     *  planar YUV 4:4:4, 48bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), little-endian
400     */
401    YUV444P16LE,
402    /**
403     *  planar YUV 4:4:4, 48bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), big-endian
404     */
405    YUV444P16BE,
406    /**
407     *  HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
408     */
409    DXVA2_VLD,
410    /**
411     *  packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined
412     */
413    RGB444LE,
414    /**
415     *  packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian,    X=unused/undefined
416     */
417    RGB444BE,
418    /**
419     *  packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined
420     */
421    BGR444LE,
422    /**
423     *  packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian,    X=unused/undefined
424     */
425    BGR444BE,
426    /**
427     *  8 bits gray, 8 bits alpha
428     */
429    YA8,
430    /**
431     *  alias for YA8
432     */
433    Y400A(AVPKitJNI.IPixelFormat_Y400A_get()),
434    /**
435     *  alias for YA8
436     */
437    GRAY8A(AVPKitJNI.IPixelFormat_GRAY8A_get()),
438    /**
439     *  packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
440     */
441    BGR48BE,
442    /**
443     *  packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
444     */
445    BGR48LE,
446    /**
447     * The following 12 formats have the disadvantage of needing 1 format for each bit depth.<br>
448     * Notice that each 9/10 bits sample is stored in 16 bits with extra padding.<br>
449     * If you want to support multiple bit depths, then using YUV420P16* with the bpp stored separately is better.<br>
450     *           planar YUV 4:2:0, 13.5bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), big-endian
451     */
452    YUV420P9BE,
453    /**
454     *  planar YUV 4:2:0, 13.5bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), little-endian
455     */
456    YUV420P9LE,
457    /**
458     *  planar YUV 4:2:0, 15bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), big-endian
459     */
460    YUV420P10BE,
461    /**
462     *  planar YUV 4:2:0, 15bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), little-endian
463     */
464    YUV420P10LE,
465    /**
466     *  planar YUV 4:2:2, 20bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), big-endian
467     */
468    YUV422P10BE,
469    /**
470     *  planar YUV 4:2:2, 20bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), little-endian
471     */
472    YUV422P10LE,
473    /**
474     *  planar YUV 4:4:4, 27bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), big-endian
475     */
476    YUV444P9BE,
477    /**
478     *  planar YUV 4:4:4, 27bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), little-endian
479     */
480    YUV444P9LE,
481    /**
482     *  planar YUV 4:4:4, 30bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), big-endian
483     */
484    YUV444P10BE,
485    /**
486     *  planar YUV 4:4:4, 30bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), little-endian
487     */
488    YUV444P10LE,
489    /**
490     *  planar YUV 4:2:2, 18bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), big-endian
491     */
492    YUV422P9BE,
493    /**
494     *  planar YUV 4:2:2, 18bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), little-endian
495     */
496    YUV422P9LE,
497    /**
498     *  planar GBR 4:4:4 24bpp
499     */
500    GBRP,
501    GBR24P(AVPKitJNI.IPixelFormat_GBR24P_get()),
502    /**
503     *  planar GBR 4:4:4 27bpp, big-endian
504     */
505    GBRP9BE,
506    /**
507     *  planar GBR 4:4:4 27bpp, little-endian
508     */
509    GBRP9LE,
510    /**
511     *  planar GBR 4:4:4 30bpp, big-endian
512     */
513    GBRP10BE,
514    /**
515     *  planar GBR 4:4:4 30bpp, little-endian
516     */
517    GBRP10LE,
518    /**
519     *  planar GBR 4:4:4 48bpp, big-endian
520     */
521    GBRP16BE,
522    /**
523     *  planar GBR 4:4:4 48bpp, little-endian
524     */
525    GBRP16LE,
526    /**
527     *  planar YUV 4:2:2 24bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples)
528     */
529    YUVA422P,
530    /**
531     *  planar YUV 4:4:4 32bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples)
532     */
533    YUVA444P,
534    /**
535     *  planar YUV 4:2:0 22.5bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples), big-endian
536     */
537    YUVA420P9BE,
538    /**
539     *  planar YUV 4:2:0 22.5bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples), little-endian
540     */
541    YUVA420P9LE,
542    /**
543     *  planar YUV 4:2:2 27bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples), big-endian
544     */
545    YUVA422P9BE,
546    /**
547     *  planar YUV 4:2:2 27bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples), little-endian
548     */
549    YUVA422P9LE,
550    /**
551     *  planar YUV 4:4:4 36bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples), big-endian
552     */
553    YUVA444P9BE,
554    /**
555     *  planar YUV 4:4:4 36bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples), little-endian
556     */
557    YUVA444P9LE,
558    /**
559     *  planar YUV 4:2:0 25bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples, big-endian)
560     */
561    YUVA420P10BE,
562    /**
563     *  planar YUV 4:2:0 25bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples, little-endian)
564     */
565    YUVA420P10LE,
566    /**
567     *  planar YUV 4:2:2 30bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples, big-endian)
568     */
569    YUVA422P10BE,
570    /**
571     *  planar YUV 4:2:2 30bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples, little-endian)
572     */
573    YUVA422P10LE,
574    /**
575     *  planar YUV 4:4:4 40bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples, big-endian)
576     */
577    YUVA444P10BE,
578    /**
579     *  planar YUV 4:4:4 40bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples, little-endian)
580     */
581    YUVA444P10LE,
582    /**
583     *  planar YUV 4:2:0 40bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples, big-endian)
584     */
585    YUVA420P16BE,
586    /**
587     *  planar YUV 4:2:0 40bpp, (1 Cr &amp; Cb sample per 2x2 Y &amp; A samples, little-endian)
588     */
589    YUVA420P16LE,
590    /**
591     *  planar YUV 4:2:2 48bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples, big-endian)
592     */
593    YUVA422P16BE,
594    /**
595     *  planar YUV 4:2:2 48bpp, (1 Cr &amp; Cb sample per 2x1 Y &amp; A samples, little-endian)
596     */
597    YUVA422P16LE,
598    /**
599     *  planar YUV 4:4:4 64bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples, big-endian)
600     */
601    YUVA444P16BE,
602    /**
603     *  planar YUV 4:4:4 64bpp, (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples, little-endian)
604     */
605    YUVA444P16LE,
606    /**
607     *  HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface
608     */
609    VDPAU,
610    /**
611     *  packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0
612     */
613    XYZ12LE,
614    /**
615     *  packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0
616     */
617    XYZ12BE,
618    /**
619     *  interleaved chroma YUV 4:2:2, 16bpp, (1 Cr &amp; Cb sample per 2x1 Y samples)
620     */
621    NV16,
622    /**
623     *  interleaved chroma YUV 4:2:2, 20bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), little-endian
624     */
625    NV20LE,
626    /**
627     *  interleaved chroma YUV 4:2:2, 20bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), big-endian
628     */
629    NV20BE,
630    /**
631     *  packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
632     */
633    RGBA64BE,
634    /**
635     *  packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
636     */
637    RGBA64LE,
638    /**
639     *  packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
640     */
641    BGRA64BE,
642    /**
643     *  packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
644     */
645    BGRA64LE,
646    /**
647     *  packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb
648     */
649    YVYU422,
650    /**
651     *  16 bits gray, 16 bits alpha (big-endian)
652     */
653    YA16BE,
654    /**
655     *  16 bits gray, 16 bits alpha (little-endian)
656     */
657    YA16LE,
658    /**
659     *  planar GBRA 4:4:4:4 32bpp
660     */
661    GBRAP,
662    /**
663     *  planar GBRA 4:4:4:4 64bpp, big-endian
664     */
665    GBRAP16BE,
666    /**
667     *  planar GBRA 4:4:4:4 64bpp, little-endian
668     */
669    GBRAP16LE,
670    /**
671     *  HW acceleration through QSV, data[3] contains a pointer to the<br>
672     *  mfxFrameSurface1 structure.
673     */
674    QSV,
675    /**
676     * HW acceleration though MMAL, data[3] contains a pointer to the<br>
677     * MMAL_BUFFER_HEADER_T structure.
678     */
679    MMAL,
680    /**
681     *  HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer
682     */
683    D3D11VA_VLD,
684    /**
685     * HW acceleration through CUDA. data[i] contain CUdeviceptr pointers<br>
686     * exactly as for system memory frames.
687     */
688    CUDA,
689    /**
690     *  packed RGB 8:8:8, 32bpp, XRGBXRGB...   X=unused/undefined
691     */
692    FMT_0RGB,
693    /**
694     *  packed RGB 8:8:8, 32bpp, RGBXRGBX...   X=unused/undefined
695     */
696    RGB0,
697    /**
698     *  packed BGR 8:8:8, 32bpp, XBGRXBGR...   X=unused/undefined
699     */
700    FMT_0BGR,
701    /**
702     *  packed BGR 8:8:8, 32bpp, BGRXBGRX...   X=unused/undefined
703     */
704    BGR0,
705    /**
706     *  planar YUV 4:2:0,18bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), big-endian
707     */
708    YUV420P12BE,
709    /**
710     *  planar YUV 4:2:0,18bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), little-endian
711     */
712    YUV420P12LE,
713    /**
714     *  planar YUV 4:2:0,21bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), big-endian
715     */
716    YUV420P14BE,
717    /**
718     *  planar YUV 4:2:0,21bpp, (1 Cr &amp; Cb sample per 2x2 Y samples), little-endian
719     */
720    YUV420P14LE,
721    /**
722     *  planar YUV 4:2:2,24bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), big-endian
723     */
724    YUV422P12BE,
725    /**
726     *  planar YUV 4:2:2,24bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), little-endian
727     */
728    YUV422P12LE,
729    /**
730     *  planar YUV 4:2:2,28bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), big-endian
731     */
732    YUV422P14BE,
733    /**
734     *  planar YUV 4:2:2,28bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), little-endian
735     */
736    YUV422P14LE,
737    /**
738     *  planar YUV 4:4:4,36bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), big-endian
739     */
740    YUV444P12BE,
741    /**
742     *  planar YUV 4:4:4,36bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), little-endian
743     */
744    YUV444P12LE,
745    /**
746     *  planar YUV 4:4:4,42bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), big-endian
747     */
748    YUV444P14BE,
749    /**
750     *  planar YUV 4:4:4,42bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), little-endian
751     */
752    YUV444P14LE,
753    /**
754     *  planar GBR 4:4:4 36bpp, big-endian
755     */
756    GBRP12BE,
757    /**
758     *  planar GBR 4:4:4 36bpp, little-endian
759     */
760    GBRP12LE,
761    /**
762     *  planar GBR 4:4:4 42bpp, big-endian
763     */
764    GBRP14BE,
765    /**
766     *  planar GBR 4:4:4 42bpp, little-endian
767     */
768    GBRP14LE,
769    /**
770     *  planar YUV 4:1:1, 12bpp, (1 Cr &amp; Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of YUV411P and setting color_range
771     */
772    YUVJ411P,
773    /**
774     *  bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples
775     */
776    BAYER_BGGR8,
777    /**
778     *  bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples
779     */
780    BAYER_RGGB8,
781    /**
782     *  bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples
783     */
784    BAYER_GBRG8,
785    /**
786     *  bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples
787     */
788    BAYER_GRBG8,
789    /**
790     *  bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian
791     */
792    BAYER_BGGR16LE,
793    /**
794     *  bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian
795     */
796    BAYER_BGGR16BE,
797    /**
798     *  bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian
799     */
800    BAYER_RGGB16LE,
801    /**
802     *  bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian
803     */
804    BAYER_RGGB16BE,
805    /**
806     *  bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian
807     */
808    BAYER_GBRG16LE,
809    /**
810     *  bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian
811     */
812    BAYER_GBRG16BE,
813    /**
814     *  bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian
815     */
816    BAYER_GRBG16LE,
817    /**
818     *  bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian
819     */
820    BAYER_GRBG16BE,
821    /**
822     *  XVideo Motion Acceleration via common packet passing
823     */
824    XVMC,
825    /**
826     *  planar YUV 4:4:0,20bpp, (1 Cr &amp; Cb sample per 1x2 Y samples), little-endian
827     */
828    YUV440P10LE,
829    /**
830     *  planar YUV 4:4:0,20bpp, (1 Cr &amp; Cb sample per 1x2 Y samples), big-endian
831     */
832    YUV440P10BE,
833    /**
834     *  planar YUV 4:4:0,24bpp, (1 Cr &amp; Cb sample per 1x2 Y samples), little-endian
835     */
836    YUV440P12LE,
837    /**
838     *  planar YUV 4:4:0,24bpp, (1 Cr &amp; Cb sample per 1x2 Y samples), big-endian
839     */
840    YUV440P12BE,
841    /**
842     *  packed AYUV 4:4:4,64bpp (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples), little-endian
843     */
844    AYUV64LE,
845    /**
846     *  packed AYUV 4:4:4,64bpp (1 Cr &amp; Cb sample per 1x1 Y &amp; A samples), big-endian
847     */
848    AYUV64BE,
849    /**
850     *  hardware decoding through Videotoolbox
851     */
852    VIDEOTOOLBOX,
853    /**
854     *  like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian
855     */
856    P010LE,
857    /**
858     *  like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian
859     */
860    P010BE,
861    /**
862     *  HW decoding through Android MediaCodec       
863     */
864    MEDIACODEC,
865    /**
866     *  planar GBR 4:4:4:4 48bpp, big-endian
867     */
868    GBRAP12BE,
869    /**
870     *  planar GBR 4:4:4:4 48bpp, little-endian
871     */
872    GBRAP12LE,
873    /**
874     *  planar GBR 4:4:4:4 40bpp, big-endian
875     */
876    GBRAP10BE,
877    /**
878     *  planar GBR 4:4:4:4 40bpp, little-endian
879     */
880    GBRAP10LE,
881    /**
882     *         Y        , 12bpp, big-endian
883     */
884    GRAY12BE,
885    /**
886     *         Y        , 12bpp, little-endian
887     */
888    GRAY12LE,
889    /**
890     *         Y        , 10bpp, big-endian
891     */
892    GRAY10BE,
893    /**
894     *         Y        , 10bpp, little-endian
895     */
896    GRAY10LE,
897    /**
898     *  like NV12, with 16bpp per component, little-endian
899     */
900    P016LE,
901    /**
902     *  like NV12, with 16bpp per component, big-endian
903     */
904    P016BE,
905    /**
906     * Hardware surfaces for Direct3D11.<br>
907     * <br>
908     * This is preferred over the legacy D3D11VA_VLD. The new D3D11<br>
909     * hwaccel API and filtering support D3D11 only.<br>
910     * <br>
911     * data[0] contains a ID3D11Texture2D pointer, and data[1] contains the<br>
912     * texture array index of the frame as intptr_t if the ID3D11Texture2D is<br>
913     * an array texture (or always 0 if it's a normal texture).
914     */
915    D3D11,
916    /**
917     *         Y        , 9bpp, big-endian
918     */
919    GRAY9BE,
920    /**
921     *         Y        , 9bpp, little-endian
922     */
923    GRAY9LE,
924    /**
925     *  IEEE-754 single precision planar GBR 4:4:4,     96bpp, big-endian
926     */
927    GBRPF32BE,
928    /**
929     *  IEEE-754 single precision planar GBR 4:4:4,     96bpp, little-endian
930     */
931    GBRPF32LE,
932    /**
933     *  IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian
934     */
935    GBRAPF32BE,
936    /**
937     *  IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian
938     */
939    GBRAPF32LE,
940    /**
941     * DRM-managed buffers exposed through PRIME buffer sharing.<br>
942     * <br>
943     * data[0] points to an AVDRMFrameDescriptor.
944     */
945    DRM_PRIME,
946    /**
947     * Hardware surfaces for OpenCL.<br>
948     * <br>
949     * data[i] contain 2D image objects (typed in C as cl_mem, used<br>
950     * in OpenCL as image2d_t) for each plane of the surface.
951     */
952    OPENCL,
953    /**
954     *         Y        , 14bpp, big-endian
955     */
956    GRAY14BE,
957    /**
958     *         Y        , 14bpp, little-endian
959     */
960    GRAY14LE,
961    /**
962     *  IEEE-754 single precision Y, 32bpp, big-endian
963     */
964    GRAYF32BE,
965    /**
966     *  IEEE-754 single precision Y, 32bpp, little-endian
967     */
968    GRAYF32LE,
969    /**
970     *  planar YUV 4:2:2,24bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), 12b alpha, big-endian
971     */
972    YUVA422P12BE,
973    /**
974     *  planar YUV 4:2:2,24bpp, (1 Cr &amp; Cb sample per 2x1 Y samples), 12b alpha, little-endian
975     */
976    YUVA422P12LE,
977    /**
978     *  planar YUV 4:4:4,36bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), 12b alpha, big-endian
979     */
980    YUVA444P12BE,
981    /**
982     *  planar YUV 4:4:4,36bpp, (1 Cr &amp; Cb sample per 1x1 Y samples), 12b alpha, little-endian
983     */
984    YUVA444P12LE,
985    /**
986     *  planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
987     */
988    NV24,
989    /**
990     *  as above, but U and V bytes are swapped
991     */
992    NV42,
993    /**
994     * Vulkan hardware images.<br>
995     * <br>
996     * data[0] points to an AVVkFrame
997     */
998    VULKAN,
999    /**
1000     *  packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian
1001     */
1002    Y210BE,
1003    /**
1004     *  packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian
1005     */
1006    Y210LE,
1007    /**
1008     *  number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
1009     */
1010    NB;
1011
    /** @return the native integer value the C/C++ layer uses for this constant. */
    public final int swigValue() {
      return swigValue;
    }
1015
1016    public static Type swigToEnum(int swigValue) {
1017      Type[] swigValues = Type.class.getEnumConstants();
1018      if (swigValue < swigValues.length && swigValue >= 0 && swigValues[swigValue].swigValue == swigValue)
1019        return swigValues[swigValue];
1020      for (Type swigEnum : swigValues)
1021        if (swigEnum.swigValue == swigValue)
1022          return swigEnum;
1023      throw new IllegalArgumentException("No enum " + Type.class + " with value " + swigValue);
1024    }
1025
    // Creates a constant with the next implicit native value, mimicking
    // C-style enum auto-increment numbering.
    @SuppressWarnings("unused")
    private Type() {
      this.swigValue = SwigNext.next++;
    }

    // Creates a constant with an explicit native value and resets the
    // auto-increment counter to continue after it.
    @SuppressWarnings("unused")
    private Type(int swigValue) {
      this.swigValue = swigValue;
      SwigNext.next = swigValue+1;
    }

    // Creates a constant that aliases another constant's native value
    // (e.g. GBR24P aliasing GBRP).
    @SuppressWarnings("unused")
    private Type(Type swigEnum) {
      this.swigValue = swigEnum.swigValue;
      SwigNext.next = this.swigValue+1;
    }
1042
    // Native integer value for this constant (may differ from ordinal()).
    private final int swigValue;

    // Holder for the next implicit native value; shared mutable state that the
    // constructors use to reproduce C enum auto-numbering across constants.
    private static class SwigNext {
      private static int next = 0;
    }
1048  }
1049
1050  public enum YUVColorComponent {
1051    YUV_Y(AVPKitJNI.IPixelFormat_YUV_Y_get()),
1052    YUV_U(AVPKitJNI.IPixelFormat_YUV_U_get()),
1053    YUV_V(AVPKitJNI.IPixelFormat_YUV_V_get());
1054
1055    public final int swigValue() {
1056      return swigValue;
1057    }
1058
1059    public static YUVColorComponent swigToEnum(int swigValue) {
1060      YUVColorComponent[] swigValues = YUVColorComponent.class.getEnumConstants();
1061      if (swigValue < swigValues.length && swigValue >= 0 && swigValues[swigValue].swigValue == swigValue)
1062        return swigValues[swigValue];
1063      for (YUVColorComponent swigEnum : swigValues)
1064        if (swigEnum.swigValue == swigValue)
1065          return swigEnum;
1066      throw new IllegalArgumentException("No enum " + YUVColorComponent.class + " with value " + swigValue);
1067    }
1068
1069    @SuppressWarnings("unused")
1070    private YUVColorComponent() {
1071      this.swigValue = SwigNext.next++;
1072    }
1073
1074    @SuppressWarnings("unused")
1075    private YUVColorComponent(int swigValue) {
1076      this.swigValue = swigValue;
1077      SwigNext.next = swigValue+1;
1078    }
1079
1080    @SuppressWarnings("unused")
1081    private YUVColorComponent(YUVColorComponent swigEnum) {
1082      this.swigValue = swigEnum.swigValue;
1083      SwigNext.next = this.swigValue+1;
1084    }
1085
1086    private final int swigValue;
1087
1088    private static class SwigNext {
1089      private static int next = 0;
1090    }
1091  }
1092
1093}