Git Inbox Mirror of the ffmpeg-devel mailing list - see https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
From: "Xiang, Haihao" <haihao.xiang-at-intel.com@ffmpeg.org>
To: "ffmpeg-devel@ffmpeg.org" <ffmpeg-devel@ffmpeg.org>
Subject: Re: [FFmpeg-devel] [PATCH 2/5] avcodec/hevcdec: make set_side_data() accessible
Date: Wed, 29 Dec 2021 06:10:52 +0000
Message-ID: <241786aa97219943c2679ab7d32ab91fc2866c77.camel@intel.com>
In-Reply-To: <DM8P223MB0365431A9992DAC3E7C4769ABA669@DM8P223MB0365.NAMP223.PROD.OUTLOOK.COM>

On Mon, 2021-11-29 at 18:51 +0000, Soft Works wrote:
> Signed-off-by: softworkz <softworkz@hotmail.com>
> ---
>  libavcodec/hevcdec.c | 112 +++++++++++++++++++++----------------------
>  libavcodec/hevcdec.h |   2 +
>  2 files changed, 57 insertions(+), 57 deletions(-)
> 
> diff --git a/libavcodec/hevcdec.c b/libavcodec/hevcdec.c
> index 46d9edf8eb..53593d6186 100644
> --- a/libavcodec/hevcdec.c
> +++ b/libavcodec/hevcdec.c
> @@ -2720,22 +2720,20 @@ error:
>      return res;
>  }
>  
> -static int set_side_data(HEVCContext *s)
> +int ff_set_side_data(AVCodecContext *logctx, HEVCSEI *sei, HEVCContext *s, AVFrame *out)

It would be better to use ff_hevc_ as a prefix because this function is HEVC-only.
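
For illustration, a minimal sketch of what the suggested rename could look like
in hevcdec.h (not part of the patch; the signature is taken unchanged from it,
only the prefix differs):

    /* Hypothetical: same signature as in the patch, with the ff_hevc_
     * prefix so the exported symbol is clearly HEVC-specific. */
    int ff_hevc_set_side_data(AVCodecContext *logctx, HEVCSEI *sei,
                              HEVCContext *s, AVFrame *out);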

Thanks
Haihao

>  {
> -    AVFrame *out = s->ref->frame;
> -
> -    if (s->sei.frame_packing.present &&
> -        s->sei.frame_packing.arrangement_type >= 3 &&
> -        s->sei.frame_packing.arrangement_type <= 5 &&
> -        s->sei.frame_packing.content_interpretation_type > 0 &&
> -        s->sei.frame_packing.content_interpretation_type < 3) {
> +    if (sei->frame_packing.present &&
> +        sei->frame_packing.arrangement_type >= 3 &&
> +        sei->frame_packing.arrangement_type <= 5 &&
> +        sei->frame_packing.content_interpretation_type > 0 &&
> +        sei->frame_packing.content_interpretation_type < 3) {
>          AVStereo3D *stereo = av_stereo3d_create_side_data(out);
>          if (!stereo)
>              return AVERROR(ENOMEM);
>  
> -        switch (s->sei.frame_packing.arrangement_type) {
> +        switch (sei->frame_packing.arrangement_type) {
>          case 3:
> -            if (s->sei.frame_packing.quincunx_subsampling)
> +            if (sei->frame_packing.quincunx_subsampling)
>                  stereo->type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
>              else
>                  stereo->type = AV_STEREO3D_SIDEBYSIDE;
> @@ -2748,21 +2746,21 @@ static int set_side_data(HEVCContext *s)
>              break;
>          }
>  
> -        if (s->sei.frame_packing.content_interpretation_type == 2)
> +        if (sei->frame_packing.content_interpretation_type == 2)
>              stereo->flags = AV_STEREO3D_FLAG_INVERT;
>  
> -        if (s->sei.frame_packing.arrangement_type == 5) {
> -            if (s->sei.frame_packing.current_frame_is_frame0_flag)
> +        if (sei->frame_packing.arrangement_type == 5) {
> +            if (sei->frame_packing.current_frame_is_frame0_flag)
>                  stereo->view = AV_STEREO3D_VIEW_LEFT;
>              else
>                  stereo->view = AV_STEREO3D_VIEW_RIGHT;
>          }
>      }
>  
> -    if (s->sei.display_orientation.present &&
> -        (s->sei.display_orientation.anticlockwise_rotation ||
> -         s->sei.display_orientation.hflip || s->sei.display_orientation.vflip)) {
> -        double angle = s->sei.display_orientation.anticlockwise_rotation * 360 / (double) (1 << 16);
> +    if (sei->display_orientation.present &&
> +        (sei->display_orientation.anticlockwise_rotation ||
> +         sei->display_orientation.hflip || sei->display_orientation.vflip)) {
> +        double angle = sei->display_orientation.anticlockwise_rotation * 360 / (double) (1 << 16);
>          AVFrameSideData *rotation = av_frame_new_side_data(out,
>                                                             AV_FRAME_DATA_DISPLAYMATRIX,
>                                                             sizeof(int32_t) * 9);
> @@ -2771,17 +2769,17 @@ static int set_side_data(HEVCContext *s)
>  
>          av_display_rotation_set((int32_t *)rotation->data, angle);
>          av_display_matrix_flip((int32_t *)rotation->data,
> -                               s->sei.display_orientation.hflip,
> -                               s->sei.display_orientation.vflip);
> +                               sei->display_orientation.hflip,
> +                               sei->display_orientation.vflip);
>      }
>  
>      // Decrement the mastering display flag when IRAP frame has no_rasl_output_flag=1
>      // so the side data persists for the entire coded video sequence.
> -    if (s->sei.mastering_display.present > 0 &&
> +    if (s && sei->mastering_display.present > 0 &&
>          IS_IRAP(s) && s->no_rasl_output_flag) {
> -        s->sei.mastering_display.present--;
> +        sei->mastering_display.present--;
>      }
> -    if (s->sei.mastering_display.present) {
> +    if (sei->mastering_display.present) {
>          // HEVC uses a g,b,r ordering, which we convert to a more natural r,g,b
>          const int mapping[3] = {2, 0, 1};
>          const int chroma_den = 50000;
> @@ -2794,25 +2792,25 @@ static int set_side_data(HEVCContext *s)
>  
>          for (i = 0; i < 3; i++) {
>              const int j = mapping[i];
> -            metadata->display_primaries[i][0].num = s->sei.mastering_display.display_primaries[j][0];
> +            metadata->display_primaries[i][0].num = sei->mastering_display.display_primaries[j][0];
>              metadata->display_primaries[i][0].den = chroma_den;
> -            metadata->display_primaries[i][1].num = s->sei.mastering_display.display_primaries[j][1];
> +            metadata->display_primaries[i][1].num = sei->mastering_display.display_primaries[j][1];
>              metadata->display_primaries[i][1].den = chroma_den;
>          }
> -        metadata->white_point[0].num = s->sei.mastering_display.white_point[0];
> +        metadata->white_point[0].num = sei->mastering_display.white_point[0];
>          metadata->white_point[0].den = chroma_den;
> -        metadata->white_point[1].num = s->sei.mastering_display.white_point[1];
> +        metadata->white_point[1].num = sei->mastering_display.white_point[1];
>          metadata->white_point[1].den = chroma_den;
>  
> -        metadata->max_luminance.num = s->sei.mastering_display.max_luminance;
> +        metadata->max_luminance.num = sei->mastering_display.max_luminance;
>          metadata->max_luminance.den = luma_den;
> -        metadata->min_luminance.num = s->sei.mastering_display.min_luminance;
> +        metadata->min_luminance.num = sei->mastering_display.min_luminance;
>          metadata->min_luminance.den = luma_den;
>          metadata->has_luminance = 1;
>          metadata->has_primaries = 1;
>  
> -        av_log(s->avctx, AV_LOG_DEBUG, "Mastering Display Metadata:\n");
> -        av_log(s->avctx, AV_LOG_DEBUG,
> +        av_log(logctx, AV_LOG_DEBUG, "Mastering Display Metadata:\n");
> +        av_log(logctx, AV_LOG_DEBUG,
>                 "r(%5.4f,%5.4f) g(%5.4f,%5.4f) b(%5.4f %5.4f) wp(%5.4f, %5.4f)\n",
>                 av_q2d(metadata->display_primaries[0][0]),
>                 av_q2d(metadata->display_primaries[0][1]),
> @@ -2821,31 +2819,31 @@ static int set_side_data(HEVCContext *s)
>                 av_q2d(metadata->display_primaries[2][0]),
>                 av_q2d(metadata->display_primaries[2][1]),
>                 av_q2d(metadata->white_point[0]), av_q2d(metadata->white_point[1]));
> -        av_log(s->avctx, AV_LOG_DEBUG,
> +        av_log(logctx, AV_LOG_DEBUG,
>                 "min_luminance=%f, max_luminance=%f\n",
>                 av_q2d(metadata->min_luminance), av_q2d(metadata->max_luminance));
>      }
>      // Decrement the mastering display flag when IRAP frame has no_rasl_output_flag=1
>      // so the side data persists for the entire coded video sequence.
> -    if (s->sei.content_light.present > 0 &&
> +    if (s && sei->content_light.present > 0 &&
>          IS_IRAP(s) && s->no_rasl_output_flag) {
> -        s->sei.content_light.present--;
> +        sei->content_light.present--;
>      }
> -    if (s->sei.content_light.present) {
> +    if (sei->content_light.present) {
>          AVContentLightMetadata *metadata =
>              av_content_light_metadata_create_side_data(out);
>          if (!metadata)
>              return AVERROR(ENOMEM);
> -        metadata->MaxCLL  = s->sei.content_light.max_content_light_level;
> -        metadata->MaxFALL = s->sei.content_light.max_pic_average_light_level;
> +        metadata->MaxCLL  = sei->content_light.max_content_light_level;
> +        metadata->MaxFALL = sei->content_light.max_pic_average_light_level;
>  
> -        av_log(s->avctx, AV_LOG_DEBUG, "Content Light Level Metadata:\n");
> -        av_log(s->avctx, AV_LOG_DEBUG, "MaxCLL=%d, MaxFALL=%d\n",
> +        av_log(logctx, AV_LOG_DEBUG, "Content Light Level Metadata:\n");
> +        av_log(logctx, AV_LOG_DEBUG, "MaxCLL=%d, MaxFALL=%d\n",
>                 metadata->MaxCLL, metadata->MaxFALL);
>      }
>  
> -    if (s->sei.a53_caption.buf_ref) {
> -        HEVCSEIA53Caption *a53 = &s->sei.a53_caption;
> +    if (sei->a53_caption.buf_ref) {
> +        HEVCSEIA53Caption *a53 = &sei->a53_caption;
>  
>          AVFrameSideData *sd = av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_A53_CC, a53->buf_ref);
>          if (!sd)
> @@ -2853,8 +2851,8 @@ static int set_side_data(HEVCContext *s)
>          a53->buf_ref = NULL;
>      }
>  
> -    for (int i = 0; i < s->sei.unregistered.nb_buf_ref; i++) {
> -        HEVCSEIUnregistered *unreg = &s->sei.unregistered;
> +    for (int i = 0; i < sei->unregistered.nb_buf_ref; i++) {
> +        HEVCSEIUnregistered *unreg = &sei->unregistered;
>  
>          if (unreg->buf_ref[i]) {
>              AVFrameSideData *sd = av_frame_new_side_data_from_buf(out,
> @@ -2865,9 +2863,9 @@ static int set_side_data(HEVCContext *s)
>              unreg->buf_ref[i] = NULL;
>          }
>      }
> -    s->sei.unregistered.nb_buf_ref = 0;
> +    sei->unregistered.nb_buf_ref = 0;
>  
> -    if (s->sei.timecode.present) {
> +    if (s && sei->timecode.present) {
>          uint32_t *tc_sd;
>          char tcbuf[AV_TIMECODE_STR_SIZE];
>          AVFrameSideData *tcside = av_frame_new_side_data(out, AV_FRAME_DATA_S12M_TIMECODE,
> @@ -2876,25 +2874,25 @@ static int set_side_data(HEVCContext *s)
>              return AVERROR(ENOMEM);
>  
>          tc_sd = (uint32_t*)tcside->data;
> -        tc_sd[0] = s->sei.timecode.num_clock_ts;
> +        tc_sd[0] = sei->timecode.num_clock_ts;
>  
>          for (int i = 0; i < tc_sd[0]; i++) {
> -            int drop = s->sei.timecode.cnt_dropped_flag[i];
> -            int   hh = s->sei.timecode.hours_value[i];
> -            int   mm = s->sei.timecode.minutes_value[i];
> -            int   ss = s->sei.timecode.seconds_value[i];
> -            int   ff = s->sei.timecode.n_frames[i];
> +            int drop = sei->timecode.cnt_dropped_flag[i];
> +            int   hh = sei->timecode.hours_value[i];
> +            int   mm = sei->timecode.minutes_value[i];
> +            int   ss = sei->timecode.seconds_value[i];
> +            int   ff = sei->timecode.n_frames[i];
>  
>              tc_sd[i + 1] = av_timecode_get_smpte(s->avctx->framerate, drop, hh, mm, ss, ff);
>              av_timecode_make_smpte_tc_string2(tcbuf, s->avctx->framerate, tc_sd[i + 1], 0, 0);
>              av_dict_set(&out->metadata, "timecode", tcbuf, 0);
>          }
>  
> -        s->sei.timecode.num_clock_ts = 0;
> +        sei->timecode.num_clock_ts = 0;
>      }
>  
> -    if (s->sei.film_grain_characteristics.present) {
> -        HEVCSEIFilmGrainCharacteristics *fgc = &s->sei.film_grain_characteristics;
> +    if (s && sei->film_grain_characteristics.present) {
> +        HEVCSEIFilmGrainCharacteristics *fgc = &sei->film_grain_characteristics;
>          AVFilmGrainParams *fgp = av_film_grain_params_create_side_data(out);
>          if (!fgp)
>              return AVERROR(ENOMEM);
> @@ -2948,8 +2946,8 @@ static int set_side_data(HEVCContext *s)
>          fgc->present = fgc->persistence_flag;
>      }
>  
> -    if (s->sei.dynamic_hdr_plus.info) {
> -        AVBufferRef *info_ref = av_buffer_ref(s->sei.dynamic_hdr_plus.info);
> +    if (sei->dynamic_hdr_plus.info) {
> +        AVBufferRef *info_ref = av_buffer_ref(sei->dynamic_hdr_plus.info);
>          if (!info_ref)
>              return AVERROR(ENOMEM);
>  
> @@ -2959,7 +2957,7 @@ static int set_side_data(HEVCContext *s)
>          }
>      }
>  
> -    if (s->rpu_buf) {
> +    if (s && s->rpu_buf) {
>          AVFrameSideData *rpu = av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_DOVI_RPU_BUFFER, s->rpu_buf);
>          if (!rpu)
>              return AVERROR(ENOMEM);
> @@ -3015,7 +3013,7 @@ static int hevc_frame_start(HEVCContext *s)
>              goto fail;
>      }
>  
> -    ret = set_side_data(s);
> +    ret = ff_set_side_data(s->avctx, &s->sei, s, s->ref->frame);
>      if (ret < 0)
>          goto fail;
>  
> diff --git a/libavcodec/hevcdec.h b/libavcodec/hevcdec.h
> index 870ff178d4..b0d2f2e169 100644
> --- a/libavcodec/hevcdec.h
> +++ b/libavcodec/hevcdec.h
> @@ -691,6 +691,8 @@ void ff_hevc_hls_residual_coding(HEVCContext *s, int x0, int y0,
>  
>  void ff_hevc_hls_mvd_coding(HEVCContext *s, int x0, int y0, int log2_cb_size);
>  
> +int ff_set_side_data(AVCodecContext *logctx, HEVCSEI *sei, HEVCContext *s, AVFrame *out);
> +
>  extern const uint8_t ff_hevc_qpel_extra_before[4];
>  extern const uint8_t ff_hevc_qpel_extra_after[4];
>  extern const uint8_t ff_hevc_qpel_extra[4];
