diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9a53358..1324fa2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -13,3 +13,5 @@
 install(FILES hve.h DESTINATION include)
 add_executable(hve-encode-raw-h264 examples/hve_encode_raw_h264.c)
 target_link_libraries(hve-encode-raw-h264 hve)
+add_executable(hve-encode-raw-hevc10 examples/hve_encode_raw_hevc10.c)
+target_link_libraries(hve-encode-raw-hevc10 hve)
diff --git a/README.md b/README.md
index d5432e0..9049166 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@
 This library wraps hardware video encoding in a simple interface.
 There are no performance loses (at the cost of library flexibility).
 
-Currently it supports VAAPI and H.264 standard.
+Currently it supports VAAPI and various codecs (H.264, HEVC, ...).
 
 See library [documentation](https://bmegli.github.io/hardware-video-encoder/group__interface.html).
 
@@ -13,10 +13,10 @@ See [hardware-video-streaming](https://github.com/bmegli/hardware-video-streamin
 
 ## Intended Use
 
-Raw H.264 encoding:
+Raw encoding (H.264, HEVC, ...):
 - custom network streaming protocols
 - low latency streaming
-- raw H.264 dumping
+- raw dumping (H.264, HEVC, ...)
 - ...
 
 Complex pipelines (muxing, scaling, color conversions, filtering) are beyond the scope of this library.
@@ -63,13 +63,18 @@ cmake ..
 make
 ```
 
-## Running Example
+## Running Examples
 
 ``` bash
 # ./hve-encode-raw-h264 <seconds> [device]
 ./hve-encode-raw-h264 10
 ```
 
+``` bash
+# ./hve-encode-raw-hevc10 <seconds> [device]
+./hve-encode-raw-hevc10 10
+```
+
 ### Troubleshooting
 
 If you have multiple VAAPI devices you may have to specify Intel directly.
@@ -81,19 +86,21 @@ sudo apt-get install vainfo
 vainfo --display drm --device /dev/dri/renderD128
 ```
 
-Once you identify your Intel device run the example, e.g.
+Once you identify your Intel device run the examples, e.g.
 
 ```bash
 ./hve-encode-raw-h264 10 /dev/dri/renderD128
+./hve-encode-raw-hevc10 10 /dev/dri/renderD128
 ```
 
 ## Testing
 
-Play result raw H.264 file with FFmpeg:
+Play the resulting raw H.264/HEVC file with FFmpeg:
 
 ``` bash
-# output goes to output.h264 file
+# output goes to output.h264/output.hevc file
 ffplay output.h264
+ffplay output.hevc
 ```
 
 You should see procedurally generated video (moving through greyscale).
@@ -110,11 +117,11 @@ There are just 4 functions and 3 user-visible data types:
 
 ```C
 struct hve_config hardware_config = {WIDTH, HEIGHT, FRAMERATE, DEVICE,
-                                     PIXEL_FORMAT, PROFILE, BFRAMES, BITRATE};
+                                     ENCODER, PIXEL_FORMAT, PROFILE, BFRAMES, BITRATE};
 struct hve *hardware_encoder=hve_init(&hardware_config);
 struct hve_frame frame = { 0 };
 
-//later assuming PIXEL_FORMAT is "nv12" (you can use something else)
+//later assuming PIXEL_FORMAT is "nv12" (you may use something else)
 
 //fill with your stride (width including padding if any)
 frame.linesize[0] = frame.linesize[1] = WIDTH;
diff --git a/examples/hve_encode_raw_h264.c b/examples/hve_encode_raw_h264.c
index 9f71efc..a55e88b 100644
--- a/examples/hve_encode_raw_h264.c
+++ b/examples/hve_encode_raw_h264.c
@@ -19,8 +19,9 @@
 const int HEIGHT=720;
 const int FRAMERATE=30;
 int SECONDS=10;
 const char *DEVICE=NULL; //NULL for default or device e.g. "/dev/dri/renderD128"
+const char *ENCODER=NULL; //NULL for default (h264_vaapi) or FFmpeg encoder e.g. "hevc_vaapi", ...
 const char *PIXEL_FORMAT="nv12"; //NULL for default (NV12) or pixel format e.g. "rgb0"
-const int PROFILE=FF_PROFILE_H264_HIGH; //or FF_PROFILE_H264_MAIN, FF_PROFILE_H264_CONSTRAINED_BASELINE, ...
+const int PROFILE=FF_PROFILE_H264_HIGH; //or FF_PROFILE_HEVC_MAIN, FF_PROFILE_H264_CONSTRAINED_BASELINE, ...
 const int BFRAMES=0; //max_b_frames, set to 0 to minimize latency, non-zero to minimize size
 const int BITRATE=0; //average bitrate in VBR
@@ -36,7 +37,7 @@ int main(int argc, char* argv[])
 		return -1;
 
 	//prepare library data
-	struct hve_config hardware_config = {WIDTH, HEIGHT, FRAMERATE, DEVICE, PIXEL_FORMAT, PROFILE, BFRAMES, BITRATE};
+	struct hve_config hardware_config = {WIDTH, HEIGHT, FRAMERATE, DEVICE, ENCODER, PIXEL_FORMAT, PROFILE, BFRAMES, BITRATE};
 	struct hve *hardware_encoder;
 
 	//prepare file for raw H.264 output
@@ -82,7 +83,7 @@ int encoding_loop(struct hve *hardware_encoder, FILE *output_file)
 	for(f=0;f<frames;f++)
diff --git a/examples/hve_encode_raw_hevc10.c b/examples/hve_encode_raw_hevc10.c
new file mode 100644
--- /dev/null
+++ b/examples/hve_encode_raw_hevc10.c
+/*
+ * HVE Hardware Video Encoding library example of 10-bit HEVC encoding
+ *
+ * Copyright 2019 (C) Bartosz Meglicki <meglickib@gmail.com>
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ */
+
+#include <stdio.h> //printf, fprintf
+#include <stdint.h> //uint8_t, uint16_t
+#include <stdlib.h> //atoi
+
+#include "../hve.h"
+
+const int WIDTH=1280;
+const int HEIGHT=720;
+const int FRAMERATE=30;
+int SECONDS=10;
+const char *DEVICE=NULL; //NULL for default or device e.g. "/dev/dri/renderD128"
+const char *ENCODER="hevc_vaapi"; //NULL for default (h264_vaapi) or FFmpeg encoder e.g. "hevc_vaapi", ...
+const char *PIXEL_FORMAT="p010le"; //NULL for default (nv12) or pixel format e.g. "rgb0", ...
+const int PROFILE=FF_PROFILE_HEVC_MAIN_10; //or FF_PROFILE_HEVC_MAIN, ...
+const int BFRAMES=0; //max_b_frames, set to 0 to minimize latency, non-zero to minimize size
+const int BITRATE=0; //average bitrate in VBR
+
+int encoding_loop(struct hve *hardware_encoder, FILE *output_file);
+int process_user_input(int argc, char* argv[]);
+int hint_user_on_failure(char *argv[]);
+void hint_user_on_success();
+
+int main(int argc, char* argv[])
+{
+    //get SECONDS and DEVICE from the command line
+    if( process_user_input(argc, argv) < 0 )
+        return -1;
+
+    //prepare library data
+    struct hve_config hardware_config = {WIDTH, HEIGHT, FRAMERATE, DEVICE, ENCODER, PIXEL_FORMAT, PROFILE, BFRAMES, BITRATE};
+    struct hve *hardware_encoder;
+
+    //prepare file for raw HEVC output
+    FILE *output_file = fopen("output.hevc", "w+b");
+    if(output_file == NULL)
+        return fprintf(stderr, "unable to open file for output\n");
+
+    //initialize library with hve_init
+    if( (hardware_encoder = hve_init(&hardware_config)) == NULL )
+    {
+        fclose(output_file);
+        return hint_user_on_failure(argv);
+    }
+
+    //do the actual encoding
+    int status = encoding_loop(hardware_encoder, output_file);
+
+    hve_close(hardware_encoder);
+    fclose(output_file);
+
+    if(status == 0)
+        hint_user_on_success();
+
+    return 0;
+}
+
+int encoding_loop(struct hve *hardware_encoder, FILE *output_file)
+{
+    struct hve_frame frame = { 0 };
+    int frames=SECONDS*FRAMERATE, f, failed, i;
+
+    //we are working with P010LE because we specified p010le pixel format
+    //when calling hve_init, in principle we could use other format
+    //if hardware supported it (e.g. RGB0 is supported on my Intel)
+    uint16_t Y[WIDTH*HEIGHT]; //dummy p010le luminance data (or p016le)
+    uint16_t color[WIDTH*HEIGHT/2]; //dummy p010le color data (or p016le)
+
+    //fill with your stride (width including padding if any)
+    frame.linesize[0] = frame.linesize[1] = WIDTH*2;
+
+    //encoded data is returned in FFmpeg packet
+    AVPacket *packet;
+
+    for(f=0;f<frames;f++)
+    {
+        //prepare dummy image data, normally you would take it from camera or other source
+        //P010LE uses 16-bit samples with the 10 significant bits in the high bits
+        for(i=0;i<WIDTH*HEIGHT;++i)
+            Y[i] = (f * 1023 / frames) << 6; //luminance (moving through greyscale)
+        for(i=0;i<WIDTH*HEIGHT/2;++i)
+            color[i] = 512 << 6; //middle of the range, no color
+
+        //fill hve_frame with pointers to your data in P010LE pixel format
+        frame.data[0]=(uint8_t*)Y;
+        frame.data[1]=(uint8_t*)color;
+
+        //encode this frame
+        if( hve_send_frame(hardware_encoder, &frame) != HVE_OK)
+            break; //break on error
+
+        //get the encoded data
+        while( (packet=hve_receive_packet(hardware_encoder, &failed)) )
+        { //do something with the data - here just dump to raw HEVC file
+            fwrite(packet->data, packet->size, 1, output_file);
+        }
+
+        //NULL packet and non-zero failed indicates failure during encoding
+        if(failed)
+            break; //break on error
+    }
+
+    //flush the encoder by sending NULL frame, encode some last frames returned from hardware
+    hve_send_frame(hardware_encoder, NULL);
+    while( (packet=hve_receive_packet(hardware_encoder, &failed)) )
+        fwrite(packet->data, packet->size, 1, output_file);
+
+    //did we encode everything we wanted?
+    //convention 0 on success, negative on failure
+    return f == frames ? 0 : -1;
+}
+
+int process_user_input(int argc, char* argv[])
+{
+    if(argc < 2)
+    {
+        fprintf(stderr, "Usage: %s <seconds> [device]\n", argv[0]);
+        fprintf(stderr, "\nexamples:\n");
+        fprintf(stderr, "%s 10\n", argv[0]);
+        fprintf(stderr, "%s 10 /dev/dri/renderD128\n", argv[0]);
+        return -1;
+    }
+
+    SECONDS = atoi(argv[1]);
+    DEVICE=argv[2]; //NULL as last argv argument, or device path
+
+    return 0;
+}
+
+int hint_user_on_failure(char *argv[])
+{
+    fprintf(stderr, "unable to initialize encoder, try to specify device e.g:\n\n");
+    fprintf(stderr, "%s 10 /dev/dri/renderD128\n", argv[0]);
+    return -1;
+}
+
+void hint_user_on_success()
+{
+    printf("finished successfully\n");
+    printf("output written to \"output.hevc\" file\n");
+    printf("test with:\n\n");
+    printf("ffplay output.hevc\n");
+}
diff --git a/hve.c b/hve.c
index a6a55d4..fe6db9a 100644
--- a/hve.c
+++ b/hve.c
@@ -59,7 +59,9 @@ struct hve *hve_init(const struct hve_config *config)
 		return hve_close_and_return_null(h);
 	}
 
-	if(!(codec = avcodec_find_encoder_by_name("h264_vaapi")))
+	const char *encoder = (config->encoder != NULL && config->encoder[0] != '\0') ? config->encoder : "h264_vaapi";
+
+	if(!(codec = avcodec_find_encoder_by_name(encoder)))
 	{
 		fprintf(stderr, "hve: could not find encoder\n");
 		return hve_close_and_return_null(h);
diff --git a/hve.h b/hve.h
index ffbe331..9ecceb8 100644
--- a/hve.h
+++ b/hve.h
@@ -47,13 +47,30 @@ struct hve;
  * @brief Encoder configuration
  *
  * The device can be:
- * - NULL (select automatically)
+ * - NULL or empty string (select automatically)
  * - point to valid device e.g. "/dev/dri/renderD128" for vaapi
  *
  * If you have multiple VAAPI devices (e.g. NVidia GPU + Intel) you may have
  * to specify Intel directly. NVidia will not work through VAAPI for encoding
  * (it works through VAAPI-VDPAU bridge and VDPAU is only for decoding).
 *
+ * The encoder can be:
+ * - NULL or empty string for "h264_vaapi"
+ * - valid FFmpeg encoder
+ *
+ * You may check encoders supported by your hardware with ffmpeg:
+ * @code
+ * ffmpeg -encoders | grep vaapi
+ * @endcode
+ *
+ * Encoders typically can be:
+ * - h264_vaapi
+ * - hevc_vaapi
+ * - mjpeg_vaapi
+ * - mpeg2_vaapi
+ * - vp8_vaapi
+ * - vp9_vaapi
+ *
  * The pixel_format (format of what you upload) typically can be:
  * - nv12 (this is generally safe choice)
  * - yuv420p
@@ -62,18 +79,29 @@ struct hve;
  * - yuv422p
  * - rgb0
  * - bgr0
+ * - p010le
  *
  * There are no software color conversions in this library.
  *
  * For pixel format explanation see:
  * FFmpeg pixel formats
  *
- * The profile (H.264 profile) can typically be:
+ * The available profiles depend on the encoder used. Use 0 to guess from input.
+ *
+ * For possible profiles see:
+ * FFmpeg profiles
+ *
+ * For H.264 the profile can typically be:
  * - FF_PROFILE_H264_CONSTRAINED_BASELINE
  * - FF_PROFILE_H264_MAIN
  * - FF_PROFILE_H264_HIGH
  * - ...
  *
+ * For HEVC the profile can typically be:
+ * - FF_PROFILE_HEVC_MAIN
+ * - FF_PROFILE_HEVC_MAIN_10 (10-bit channel precision)
+ * - ...
+ *
  * You may check profiles supported by your hardware with vainfo:
  * @code
  * vainfo --display drm --device /dev/dri/renderDXYZ
@@ -93,8 +121,9 @@ struct hve_config
 	int height; //!< height of the encoded frames
 	int framerate; //!< framerate of the encoded video
 	const char *device; //!< NULL / "" or device, e.g. "/dev/dri/renderD128"
-	const char *pixel_format; //!< NULL / "" for NV12 or format, e.g. "rgb0", "bgr0", "nv12", "yuv420p"
-	int profile; //!< 0 to guess from input or profile e.g. FF_PROFILE_H264_MAIN, FF_PROFILE_H264_HIGH
+	const char *encoder; //!< NULL / "" or encoder, e.g. "h264_vaapi"
+	const char *pixel_format; //!< NULL / "" for NV12 or format, e.g. "rgb0", "bgr0", "nv12", "yuv420p", "p010le"
+	int profile; //!< 0 to guess from input or profile e.g. FF_PROFILE_H264_MAIN, FF_PROFILE_H264_HIGH, FF_PROFILE_HEVC_MAIN, ...
 	int max_b_frames; //!< maximum number of B-frames between non-B-frames (disable if you need low latency)
 	int bit_rate; //!< the average bitrate in VBR mode
 };
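For quick reference, the snippet below condenses the 10-bit HEVC path enabled by this patch into a single function. It is a minimal sketch based only on the interface visible above (hve_init, hve_send_frame, hve_receive_packet, hve_close and the extended hve_config); the function name encode_gray_hevc10, the buffer sizes and the hard-coded 1280x720@30 parameters are illustrative assumptions, not part of the library.

```C
//Minimal sketch (not part of this patch): one call path for 10-bit HEVC
//using the new encoder/profile fields; names and constants are illustrative.
#include <stdint.h> //uint8_t, uint16_t
#include <stdio.h>  //FILE, fwrite
#include "hve.h"    //installed via install(FILES hve.h DESTINATION include)

int encode_gray_hevc10(FILE *out)
{
	//width, height, framerate, device, encoder, pixel_format, profile, max_b_frames, bit_rate
	struct hve_config config = {1280, 720, 30, NULL, "hevc_vaapi",
	                            "p010le", FF_PROFILE_HEVC_MAIN_10, 0, 0};
	struct hve *encoder = hve_init(&config);
	if(encoder == NULL)
		return -1;

	//P010LE: 16-bit little-endian samples, 10 significant bits in the high bits
	static uint16_t Y[1280*720];    //luma plane (static keeps it off the stack)
	static uint16_t UV[1280*720/2]; //interleaved UV plane
	for(int i=0; i<1280*720; ++i)   Y[i]  = 512 << 6; //mid grey
	for(int i=0; i<1280*720/2; ++i) UV[i] = 512 << 6; //neutral chroma

	struct hve_frame frame = {0};
	frame.linesize[0] = frame.linesize[1] = 1280*2; //stride in bytes, not pixels
	frame.data[0] = (uint8_t*)Y; //data pointers are byte pointers, hence the casts
	frame.data[1] = (uint8_t*)UV;

	int failed = 0;
	AVPacket *packet;

	//encode a single frame, then drain whatever the hardware returns
	if(hve_send_frame(encoder, &frame) == HVE_OK)
		while( (packet = hve_receive_packet(encoder, &failed)) )
			fwrite(packet->data, packet->size, 1, out);

	//flush with a NULL frame and collect the remaining packets
	hve_send_frame(encoder, NULL);
	while( (packet = hve_receive_packet(encoder, &failed)) )
		fwrite(packet->data, packet->size, 1, out);

	hve_close(encoder);
	return failed ? -1 : 0;
}
```

Compared with the full examples in this patch, the only HEVC-specific choices are the three configuration fields (encoder, pixel_format, profile); the send/receive/flush flow is identical to the H.264 case, which is the point of the encoder field added to hve_config.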