
Implement reading of data streams into Frame using FFmpegFrameGrabber
bytes-and-bits committed Feb 13, 2020
1 parent fe48427 commit 4c07809
Showing 2 changed files with 31 additions and 3 deletions.
7 changes: 6 additions & 1 deletion src/main/java/org/bytedeco/javacv/FFmpegFrameGrabber.java
@@ -68,7 +68,6 @@
import org.bytedeco.javacpp.PointerPointer;

import org.bytedeco.ffmpeg.avcodec.*;
import org.bytedeco.ffmpeg.avdevice.*;
import org.bytedeco.ffmpeg.avformat.*;
import org.bytedeco.ffmpeg.avutil.*;
import org.bytedeco.ffmpeg.swresample.*;
@@ -1272,6 +1271,8 @@ public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, b
}
}

frame.streamIndex = pkt.stream_index();

// Is this a packet from the video stream?
if (doVideo && video_st != null && pkt.stream_index() == video_st.index()
&& (!keyFrames || pkt.flags() == AV_PKT_FLAG_KEY)) {
@@ -1325,6 +1326,10 @@ public Frame grabFrame(boolean doAudio, boolean doVideo, boolean doProcessing, b
frame.keyFrame = samples_frame.key_frame() != 0;
}
}
} else {
// Export the stream byte data for non audio / video frames
frame.data = pkt.data().position(0).limit(pkt.size()).asByteBuffer();
done = true;
}

if (pkt2.size() <= 0) {
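With this change, a packet that belongs to neither the selected video stream nor the selected audio stream (for example a subtitle or KLV metadata stream) is returned from grabFrame() with its raw bytes exposed through the new data field and its origin recorded in streamIndex. The following is a minimal consumer sketch, not part of the commit; the input file name is a placeholder, and because data wraps the packet's native memory, the bytes are assumed to need copying if they are kept beyond the next grab.

import java.nio.ByteBuffer;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public class DataStreamGrabSketch {
    public static void main(String[] args) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input-with-klv.ts"); // placeholder path
        grabber.start();
        Frame frame;
        while ((frame = grabber.grab()) != null) {
            if (frame.data != null) {
                // Raw bytes of a non audio/video packet; copy them out because the
                // underlying buffer points into the packet that was just read.
                byte[] payload = new byte[frame.data.remaining()];
                frame.data.get(payload);
                System.out.println("stream " + frame.streamIndex + ": " + payload.length + " bytes");
            }
        }
        grabber.stop();
        grabber.release();
    }
}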
27 changes: 25 additions & 2 deletions src/main/java/org/bytedeco/javacv/Frame.java
@@ -71,11 +71,12 @@ public class Frame implements Indexable {
DEPTH_LONG = -64,
DEPTH_FLOAT = 32,
DEPTH_DOUBLE = 64;

/** Constants defining data type in the frame*/
public static enum Type {
VIDEO,
AUDIO,
DATA
}

/** Information associated with the {@link #image} field. */
@@ -94,6 +95,12 @@ public static enum Type {
/** Buffers to hold audio samples from multiple channels for an audio frame. */
public Buffer[] samples;

/** Buffer to hold a data stream associated with a frame. */
public ByteBuffer data;

/** Stream number the audio|video|other data is associated with. */
public int streamIndex;

/** The underlying data object, for example, Pointer, AVFrame, IplImage, or Mat. */
public Object opaque;

@@ -119,6 +126,8 @@ public Frame(int width, int height, int depth, int channels, int imageStride) {
this.imageChannels = channels;
this.imageStride = imageStride;
this.image = new Buffer[1];
this.data = null;
this.streamIndex = -1;

Pointer pointer = new BytePointer(imageHeight * imageStride * pixelSize(depth));
ByteBuffer buffer = pointer.asByteBuffer();
@@ -207,7 +216,8 @@ public Frame clone() {
newFrame.imageChannels = imageChannels;
newFrame.imageStride = imageStride;
newFrame.keyFrame = keyFrame;
newFrame.opaque = new Pointer[2];
newFrame.streamIndex = streamIndex;
newFrame.opaque = new Pointer[3];
if (image != null) {
newFrame.image = new Buffer[image.length];
((Pointer[])newFrame.opaque)[0] = cloneBufferArray(image, newFrame.image);
@@ -221,6 +231,18 @@ public Frame clone() {
((Pointer[])newFrame.opaque)[1] = cloneBufferArray(samples, newFrame.samples);
}

// Other data streams
if (data != null) {
newFrame.data = ByteBuffer.allocate(data.capacity());
BytePointer pointer = new BytePointer(data.capacity());
newFrame.data = pointer.limit(pointer.position() + data.limit())
.asByteBuffer().put((ByteBuffer)data);
pointer.position(pointer.limit());
data.rewind();
newFrame.data.rewind();
((Pointer[])newFrame.opaque)[2] = pointer;
}

// Add timestamp
newFrame.timestamp = timestamp;

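clone() now deep-copies the data buffer as well: the bytes are copied into a new BytePointer that is kept alive through the third slot of the opaque array, so the clone remains usable after the original buffer changes. A small sketch exercising that behavior, not part of the commit:

import java.nio.ByteBuffer;
import org.bytedeco.javacv.Frame;

public class CloneDataSketch {
    public static void main(String[] args) {
        Frame original = new Frame();
        original.data = ByteBuffer.wrap(new byte[] {1, 2, 3, 4});
        original.streamIndex = 3;

        Frame copy = original.clone();
        original.data.put(0, (byte) 99);          // mutate the source buffer after cloning

        System.out.println(copy.streamIndex);     // 3
        System.out.println(copy.data.get(0));     // 1, since the clone holds its own copy of the bytes
    }
}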
@@ -330,6 +352,7 @@ public EnumSet<Type> getTypes() {
EnumSet<Type> type = EnumSet.noneOf(Type.class);
if (image != null) type.add(Type.VIDEO);
if (samples != null) type.add(Type.AUDIO);
if (data != null) type.add(Type.DATA);
return type;
}
}
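With Type.DATA now reported by getTypes(), callers can also dispatch on the frame kind instead of probing the data field directly (compare the grab-loop sketch after the FFmpegFrameGrabber diff above). A brief sketch, not part of the commit; the input path is a placeholder:

import java.util.EnumSet;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;

public class FrameTypeDispatchSketch {
    public static void main(String[] args) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.ts"); // placeholder path
        grabber.start();
        Frame frame;
        while ((frame = grabber.grab()) != null) {
            EnumSet<Frame.Type> types = frame.getTypes();
            if (types.contains(Frame.Type.VIDEO)) {
                // decoded picture available in frame.image
            } else if (types.contains(Frame.Type.AUDIO)) {
                // decoded samples available in frame.samples
            } else if (types.contains(Frame.Type.DATA)) {
                // raw packet bytes in frame.data, from stream frame.streamIndex
            }
        }
        grabber.stop();
        grabber.release();
    }
}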
