diff --git a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java
index 29aa07fe..0f679bed 100644
--- a/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java
+++ b/src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java
@@ -68,7 +68,6 @@
 import org.bytedeco.javacpp.PointerPointer;
 
 import org.bytedeco.ffmpeg.avcodec.*;
-import org.bytedeco.ffmpeg.avdevice.*;
 import org.bytedeco.ffmpeg.avformat.*;
 import org.bytedeco.ffmpeg.avutil.*;
 import org.bytedeco.ffmpeg.swresample.*;
@@ -1272,6 +1271,8 @@ public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, b
                 }
             }
 
+            frame.streamIndex = pkt.stream_index();
+
             // Is this a packet from the video stream?
             if (doVideo && video_st != null && pkt.stream_index() == video_st.index()
                     && (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) {
@@ -1325,6 +1326,10 @@ public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, b
                         frame.keyFrame = samples_frame.key_frame() != 0;
                     }
                 }
+            } else {
+                // Export the stream byte data for non audio / video frames
+                frame.data = pkt.data().position(0).limit(pkt.size()).asByteBuffer();
+                done = true;
             }
 
             if (pkt2.size() <= 0) {
diff --git a/src/main/java/org/bytedeco/javacv/Frame.java b/src/main/java/org/bytedeco/javacv/Frame.java
index fa9e7c2c..c5badb82 100644
--- a/src/main/java/org/bytedeco/javacv/Frame.java
+++ b/src/main/java/org/bytedeco/javacv/Frame.java
@@ -71,11 +71,12 @@ public class Frame implements Indexable {
             DEPTH_LONG   = -64,
             DEPTH_FLOAT  =  32,
             DEPTH_DOUBLE =  64;
 
     /** Constants defining data type in the frame*/
     public static enum Type {
         VIDEO,
         AUDIO,
+        DATA
     }
 
     /** Information associated with the {@link #image} field. */
@@ -94,6 +95,12 @@ public class Frame implements Indexable {
     /** Buffers to hold audio samples from multiple channels for an audio frame. */
     public Buffer[] samples;
 
+    /** Buffer to hold a data stream associated with a frame. */
+    public ByteBuffer data;
+
+    /** Stream number the audio/video/other data is associated with. */
+    public int streamIndex;
+
     /** The underlying data object, for example, Pointer, AVFrame, IplImage, or Mat. */
     public Object opaque;
 
@@ -119,6 +126,8 @@ public Frame(int width, int height, int depth, int channels, int imageStride) {
         this.imageChannels = channels;
         this.imageStride = imageStride;
         this.image = new Buffer[1];
+        this.data = null;
+        this.streamIndex = -1;
 
         Pointer pointer = new BytePointer(imageHeight * imageStride * pixelSize(depth));
         ByteBuffer buffer = pointer.asByteBuffer();
@@ -207,7 +216,8 @@ public Frame clone() {
         newFrame.imageChannels = imageChannels;
         newFrame.imageStride = imageStride;
         newFrame.keyFrame = keyFrame;
-        newFrame.opaque = new Pointer[2];
+        newFrame.streamIndex = streamIndex;
+        newFrame.opaque = new Pointer[3];
         if (image != null) {
             newFrame.image = new Buffer[image.length];
             ((Pointer[])newFrame.opaque)[0] = cloneBufferArray(image, newFrame.image);
@@ -221,6 +231,17 @@ public Frame clone() {
             ((Pointer[])newFrame.opaque)[1] = cloneBufferArray(samples, newFrame.samples);
         }
 
+        // Other data streams: deep-copy into an off-heap buffer owned by the clone.
+        // NOTE(review): assumes data.position() == 0 when cloning, and rewinds the
+        // source buffer as a side effect — confirm callers tolerate this.
+        if (data != null) {
+            BytePointer pointer = new BytePointer(data.capacity());
+            newFrame.data = pointer.limit(pointer.position() + data.limit())
+                    .asByteBuffer().put(data);
+            pointer.position(pointer.limit());
+            data.rewind();
+            newFrame.data.rewind();
+            ((Pointer[])newFrame.opaque)[2] = pointer;
+        }
+
         // Add timestamp
         newFrame.timestamp = timestamp;
 
@@ -330,6 +353,7 @@ public EnumSet<Type> getTypes() {
         EnumSet<Type> type = EnumSet.noneOf(Type.class);
         if (image != null) type.add(Type.VIDEO);
         if (samples != null) type.add(Type.AUDIO);
+        if (data != null) type.add(Type.DATA);
         return type;
     }
 }