First of all, we need to clarify which functions we need. In this case we need at least one function to open the video and one to fetch a frame, and — to save resources — another function to close the video and free the memory again.
To tell Java that JNI is used, we use the keyword native.
The Java class will then look something like this:
Zuerst müssen wir klären, welche Funktionen wir dazu benötigen. In diesem Fall benötigen wir mindestens eine Funktion, um das Video zu öffnen, und eine, um uns ein Frame zu holen, und um Ressourcen zu sparen noch eine Funktion, um das Video zu schließen und den Speicher wieder freizugeben.
Um Java mitzuteilen, dass die Methode JNI nutzt, gibt es das Schlüsselwort native.
Die Java Klasse sieht dann ungefähr wie folgt aus:
/**
 * JNI wrapper for the FFMPEG library.
 *
 * To start programs that use this class you must add
 * -Djava.library.path=[path to jffmpeg.so] to the command line
 *
 * @author Darkstar
 * @version 0.1
 */
public class JFFMPEG {
    /**
     * Get the next frame as a BufferedImage.
     *
     * @return the next frame, or null if no frame is available
     * @TODO: improve error handling
     */
    public static BufferedImage get() {
        int[][] raw = getFrame();
        // No frame available (end of stream or decode error); also guard
        // against a zero-width array, which would make raw[0] fail.
        if (raw == null || raw.length == 0) {
            return null;
        }
        // raw is indexed [x][y]: width = raw.length, height = raw[0].length,
        // each element is a packed 0xRRGGBB pixel.
        BufferedImage bi = new BufferedImage(
                raw.length,
                raw[0].length,
                BufferedImage.TYPE_INT_RGB);
        for (int x = 0; x < raw.length; x++) {
            for (int y = 0; y < raw[0].length; y++) {
                bi.setRGB(x, y, raw[x][y]);
            }
        }
        return bi;
    }

    /**
     * Load the native library at class-load time.
     */
    static {
        System.loadLibrary("jffmpeg");
    }

    /**
     * Initializes the FFMPEG library and opens the video.
     *
     * @param videoFile the video to open
     * @TODO: improve error handling
     */
    public static native void initFFMPEG(String videoFile);

    /**
     * Closes the video file and releases the memory.
     *
     * @TODO: improve error handling
     */
    public static native void exitFFMPEG();

    /**
     * Determines the number of frames in the video.
     *
     * @return frame count
     * @TODO: improve error handling
     */
    public static native long getFrameCount();

    /**
     * Move the file pointer to a specific frame.
     *
     * @param frame target frame
     * @TODO: improve error handling
     */
    public static native void seek(long frame);

    /**
     * Get the next frame as raw pixel data.
     *
     * @return the next frame
     * @TODO: improve error handling
     */
    public static native int[][] getFrame();

    /**
     * Get a specific frame as raw pixel data.
     *
     * @param frame target frame
     * @return the requested frame
     * @TODO: improve error handling
     */
    public static native int[][] getFrame(long frame);
}
$ javah de.darkstar.video.tools.jffmpeg.JFFMPEG
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include "de_darkstar_video_tools_jffmpeg_JFFMPEG.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
/* Global decoder state shared by all JNI entry points below.
 * NOTE(review): this limits the wrapper to one open video at a time and
 * makes it non-thread-safe; consider packing this into a context struct. */
AVFormatContext *pFormatCtx = NULL; /* demuxer context for the open file */
int i, videoStream;                 /* scratch counter / index of the video stream */
AVCodecContext *pCodecCtx;          /* decoder context of the video stream */
AVCodec *pCodec;                    /* decoder selected for the stream's codec id */
AVFrame *pFrame;                    /* raw decoded frame (native pixel format) */
AVFrame *pFrameRGB;                 /* frame converted to RGB for Java */
AVPacket packet;                    /* packet currently being demuxed */
int frame_finished;                 /* set by the decoder when a full frame is ready */
int numBytes;                       /* size of the RGB conversion buffer */
uint8_t *buffer;                    /* backing storage for pFrameRGB */
AVStream *video_st;                 /* convenience pointer to the video stream */
/*
 * Class:     de_darkstar_video_tools_jffmpeg_JFFMPEG
 * Method:    initFFMPEG
 * Signature: (Ljava/lang/String;)V
 *
 * Opens the given video file, locates the first video stream, opens a
 * decoder for it and allocates the RGB frame buffer used by getFrame().
 * On failure it prints a diagnostic and returns with the globals only
 * partially initialized (error reporting via exceptions is still TODO).
 */
JNIEXPORT void JNICALL Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_initFFMPEG (JNIEnv *env, jclass class, jstring fileName) {
    /* GetStringUTFChars hands out a const pointer that MUST be released. */
    const char *path = (*env)->GetStringUTFChars(env, fileName, NULL);
    if (path == NULL)
        return; /* OutOfMemoryError was already thrown by the JVM */
    av_register_all();
    /* BUG FIX: the original did "return -1" from this void function on
     * every error path (invalid C) and never released the UTF string. */
    if (avformat_open_input(&pFormatCtx, path, NULL, NULL) != 0) {
        fprintf(stderr, "avformat_open_input() failed: %s\n", path);
        goto out;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        fprintf(stderr, "avformat_find_stream_info() failed\n");
        goto out;
    }
    av_dump_format(pFormatCtx, 0, path, 0);
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if (videoStream == -1) {
        fprintf(stderr, "Didn't find a video stream\n");
        goto out;
    }
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    video_st = pFormatCtx->streams[videoStream];
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        goto out;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        fprintf(stderr, "avcodec_open fail\n");
        goto out;
    }
    pFrame = av_frame_alloc();
    pFrameRGB = av_frame_alloc();
    if (pFrame == NULL || pFrameRGB == NULL)
        goto out;
    /* BUG FIX: the buffer was sized for RGB24 but filled as RGBA, papered
     * over with a "* 4" over-allocation. Use AV_PIX_FMT_RGB24 consistently:
     * getFrame() converts to RGB24 and the pixel reader assumes 3 bytes
     * per pixel, so the fill format must match. */
    numBytes = av_image_get_buffer_size(
        AV_PIX_FMT_RGB24,
        pCodecCtx->width,
        pCodecCtx->height, 1);
    buffer = av_malloc(numBytes);
    if (buffer == NULL)
        goto out;
    av_image_fill_arrays(
        pFrameRGB->data,
        pFrameRGB->linesize,
        buffer,
        AV_PIX_FMT_RGB24,
        pCodecCtx->width,
        pCodecCtx->height,
        1
    );
    i = 0;
out:
    (*env)->ReleaseStringUTFChars(env, fileName, path);
}
/*
 * Class:     de_darkstar_video_tools_jffmpeg_JFFMPEG
 * Method:    exitFFMPEG
 * Signature: ()V
 *
 * Closes the video and releases every buffer allocated by initFFMPEG().
 * Pointers are reset to NULL so a double call (or a call after a failed
 * init) does not double-free.
 */
JNIEXPORT void JNICALL Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_exitFFMPEG (JNIEnv *env, jclass class) {
    av_free(buffer);
    buffer = NULL;
    /* BUG FIX: frames from av_frame_alloc() must be released with
     * av_frame_free(), not plain av_free(), or their internal state
     * leaks; av_frame_free() also NULLs the pointers for us. */
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    pCodecCtx = NULL;
    avformat_close_input(&pFormatCtx); /* also sets pFormatCtx to NULL */
}
/*
 * Converts the RGB24 pixel data of pFrame into a Java int[width][height]
 * array, one packed 0xRRGGBB int per pixel. Returns NULL with a pending
 * Java exception on allocation failure. iFrame is currently unused but
 * kept for interface compatibility with the callers.
 */
jobjectArray castToJava2DimArray(JNIEnv *env, AVFrame *pFrame, int width, int height, int iFrame) {
    (void) iFrame; /* reserved for future use */
    jclass intArrayClass = (*env)->FindClass(env, "[I");
    if (intArrayClass == NULL)
        return NULL; /* ClassNotFound exception pending */
    jobjectArray jniRGB = (*env)->NewObjectArray(env, (jsize) width, intArrayClass, NULL);
    if (jniRGB == NULL)
        return NULL;
    /* BUG FIX: the original kept a jint[width][height] VLA on the stack
     * (hundreds of KB per call) and contained a stray debug write
     * rgb[3][2] = 0xaffe that was out of bounds for frames smaller than
     * 4x3. Build one column (fixed x, all y) at a time on the heap. */
    jint *column = malloc((size_t) height * sizeof *column);
    if (column == NULL)
        return NULL;
    for (int x = 0; x < width; x++) {
        for (int y = 0; y < height; y++) {
            const uint8_t *pos = pFrame->data[0] + y * pFrame->linesize[0] + x * 3;
            column[y] = (pos[0] << 16) | (pos[1] << 8) | pos[2];
        }
        jintArray intArray = (*env)->NewIntArray(env, height);
        if (intArray == NULL) {
            free(column);
            return NULL;
        }
        (*env)->SetIntArrayRegion(env, intArray, (jsize) 0, (jsize) height, column);
        (*env)->SetObjectArrayElement(env, jniRGB, (jsize) x, intArray);
        (*env)->DeleteLocalRef(env, intArray);
    }
    free(column);
    return jniRGB;
}
/*
 * Class:     de_darkstar_video_tools_jffmpeg_JFFMPEG
 * Method:    getFrame
 * Signature: ()[[I
 *
 * Decodes the next video frame, scales it to a fixed 320x200 RGB24
 * image and returns it as a Java int[320][200], or null at end of
 * stream / on error.
 */
JNIEXPORT jobjectArray JNICALL Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_getFrame__ (JNIEnv *env, jclass class) {
    i = 0;
    int gotFrame = 0;
    jobjectArray result = 0;
    while (av_read_frame(pFormatCtx, &packet) >= 0 && gotFrame < 1) {
        if (packet.stream_index == videoStream) {
            /* BUG FIX: the decode return value was ignored; on a decoder
             * error skip the packet instead of trusting a possibly stale
             * frame_finished flag. */
            if (avcodec_decode_video2(pCodecCtx, pFrame, &frame_finished, &packet) >= 0
                    && frame_finished) {
                struct SwsContext *img_convert_ctx;
                int w = video_st->codec->width;
                int h = video_st->codec->height;
                int scaled_w = 320; /* fixed output size of this wrapper */
                int scaled_h = 200;
                img_convert_ctx = sws_getContext(
                    w, h,
                    video_st->codec->pix_fmt,
                    scaled_w, scaled_h,
                    AV_PIX_FMT_RGB24, SWS_BICUBIC,
                    NULL, NULL, NULL
                );
                if (img_convert_ctx == NULL) {
                    fprintf(stderr, "Cannot initialize the conversion context!\n");
                    /* BUG FIX: release the packet before leaving the loop;
                     * the original break skipped av_free_packet(). */
                    av_free_packet(&packet);
                    break;
                }
                sws_scale(img_convert_ctx, (const uint8_t * const *) pFrame->data,
                          pFrame->linesize, 0,
                          video_st->codec->height,
                          pFrameRGB->data, pFrameRGB->linesize);
                result = castToJava2DimArray(env, pFrameRGB, scaled_w, scaled_h, i++);
                gotFrame = 1;
                sws_freeContext(img_convert_ctx);
            }
        }
        av_free_packet(&packet);
    }
    return result;
}
/*
 * Class:     de_darkstar_video_tools_jffmpeg_JFFMPEG
 * Method:    getFrameCount
 * Signature: ()J
 *
 * Returns the number of frames in the video stream as reported by the
 * container, or 0 if no video is open. (The original returned the
 * hard-coded placeholder 1337.)
 */
JNIEXPORT jlong JNICALL Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_getFrameCount (JNIEnv *env, jclass class) {
    if (video_st == NULL)
        return 0;
    /* nb_frames is 0 when the container does not store a frame count;
     * callers should treat 0 as "unknown". */
    return (jlong) video_st->nb_frames;
}
/*
 * Class:     de_darkstar_video_tools_jffmpeg_JFFMPEG
 * Method:    seek
 * Signature: (J)V
 *
 * Seeks the stream to the nearest keyframe at or before the requested
 * position and flushes the decoder so decoding restarts cleanly.
 * (The original was an empty stub.)
 *
 * NOTE(review): av_seek_frame() expects a timestamp in stream time_base
 * units; passing the frame index directly is only correct when one tick
 * equals one frame — TODO convert via av_rescale_q() for the general case.
 */
JNIEXPORT void JNICALL Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_seek (JNIEnv *env, jclass class, jlong frame) {
    if (pFormatCtx == NULL || videoStream < 0)
        return; /* no video open */
    if (av_seek_frame(pFormatCtx, videoStream, (int64_t) frame, AVSEEK_FLAG_BACKWARD) < 0) {
        fprintf(stderr, "av_seek_frame() failed for frame %lld\n", (long long) frame);
        return;
    }
    avcodec_flush_buffers(pCodecCtx);
}
/*
 * Class:     de_darkstar_video_tools_jffmpeg_JFFMPEG
 * Method:    getFrame
 * Signature: (J)[[I
 *
 * Returns a specific frame as raw pixel data by positioning the stream
 * and then decoding the next frame. (The original was a stub that
 * always returned null.)
 */
JNIEXPORT jobjectArray JNICALL Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_getFrame__J (JNIEnv *env, jclass class, jlong frame) {
    /* Implemented in terms of the existing entry points: seek, then
     * decode and return the next available frame. */
    Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_seek(env, class, frame);
    return Java_de_darkstar_video_tools_jffmpeg_JFFMPEG_getFrame__(env, class);
}
gcc -O3 -c -fPIC jffmpeg.c -o jffmpeg.o -I/usr/lib64/jvm/jdk1.8.latest/include -I/usr/lib64/jvm/jdk1.8.latest/include/linux
gcc -shared -o libjffmpeg.so jffmpeg.o -Wl,-rpath,/usr/local/lib64 -L/usr/local/lib64 -lswscale -lavformat -lavcodec -lavutil
-Djava.library.path=[path to jffmpeg.so]