* [FFmpeg-devel] [PATCH] avfilter: add vf_yazf filter
@ 2025-05-31 14:49 Quentin RENARD
2025-06-01 8:21 ` Gyan Doshi
0 siblings, 1 reply; 5+ messages in thread
From: Quentin RENARD @ 2025-05-31 14:49 UTC (permalink / raw)
To: ffmpeg-devel
zoompan filter with floating point precision
Signed-off-by: Quentin Renard <contact-github@asticode.com>
---
Changelog | 1 +
doc/filters.texi | 59 +++++++
libavfilter/Makefile | 1 +
libavfilter/allfilters.c | 1 +
libavfilter/vf_yazf.c | 360 +++++++++++++++++++++++++++++++++++++++
5 files changed, 422 insertions(+)
create mode 100644 libavfilter/vf_yazf.c
diff --git a/Changelog b/Changelog
index 4217449438..632d0e03d1 100644
--- a/Changelog
+++ b/Changelog
@@ -18,6 +18,7 @@ version <next>:
- APV encoding support through a libopenapv wrapper
- VVC decoder supports all content of SCC (Screen Content Coding):
IBC (Inter Block Copy), Palette Mode and ACT (Adaptive Color Transform
+- yazf filter
version 7.1:
diff --git a/doc/filters.texi b/doc/filters.texi
index 63f55f5794..4fa5be9e94 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -25986,6 +25986,65 @@ Set blur strength. Default value is 128.
@subsection Commands
This filter supports same @ref{commands} as options.
+@section yazf
+
+Apply Zoom & Pan effect with floating point precision ("yazf" means "yet another zoompan filter").
+
+This filter accepts the following options:
+
+@table @option
+@item z
+Set the zoom expression. Range is 1-10. Default is 1.
+
+@item x
+@item y
+Set the x and y expressions. Default is 0.
+
+@item w
+@item h
+Set the output frame width and height expressions. Default is 1.
+@end table
+
+Each expression can contain the following constants:
+
+@table @option
+@item in_w, iw
+Input width.
+
+@item in_h, ih
+Input height.
+
+@item n
+Input frame count.
+
+@item t
+The input timestamp expressed in seconds.
+@end table
+
+Additionally, the x and y expressions can contain the following constants:
+
+@table @option
+@item z
+Last calculated zoom from 'z' expression for current input frame.
+
+@item zw
+Last calculated zoom width for current input frame.
+
+@item zh
+Last calculated zoom height for current input frame.
+@end table
+
+@subsection Examples
+
+@itemize
+@item
+Zoom in 2x into center of picture for first 30 frames:
+@example
+yazf=x='(iw/2)-(zw/2)':y='(ih/2)-(zh/2)':z='min(2,1+n/30)':w=1080:h=1080
+@end example
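+
+@item
+Pan from the left edge to the right edge of the picture over the first 5 seconds
+at a constant 2x zoom, keeping the input frame size (illustrative; assumes the
+input is at least 5 seconds long):
+@example
+yazf=z=2:x='(iw-zw)*t/5':y='(ih/2)-(zh/2)':w=iw:h=ih
+@end example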
+
+@end itemize
+
@section zoompan
Apply Zoom & Pan effect.
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 97f8f17272..409a9f10dc 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -582,6 +582,7 @@ OBJS-$(CONFIG_YADIF_VIDEOTOOLBOX_FILTER) += vf_yadif_videotoolbox.o \
metal/utils.o \
yadif_common.o
OBJS-$(CONFIG_YAEPBLUR_FILTER) += vf_yaepblur.o
+OBJS-$(CONFIG_YAZF_FILTER) += vf_yazf.o
OBJS-$(CONFIG_ZMQ_FILTER) += f_zmq.o
OBJS-$(CONFIG_ZOOMPAN_FILTER) += vf_zoompan.o
OBJS-$(CONFIG_ZSCALE_FILTER) += vf_zscale.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index 3bc045b28f..5fc80303f5 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -544,6 +544,7 @@ extern const FFFilter ff_vf_yadif;
extern const FFFilter ff_vf_yadif_cuda;
extern const FFFilter ff_vf_yadif_videotoolbox;
extern const FFFilter ff_vf_yaepblur;
+extern const FFFilter ff_vf_yazf;
extern const FFFilter ff_vf_zmq;
extern const FFFilter ff_vf_zoompan;
extern const FFFilter ff_vf_zscale;
diff --git a/libavfilter/vf_yazf.c b/libavfilter/vf_yazf.c
new file mode 100644
index 0000000000..36c158fd32
--- /dev/null
+++ b/libavfilter/vf_yazf.c
@@ -0,0 +1,360 @@
+/*
+ * Copyright (c) 2025 Quentin Renard
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavutil/eval.h"
+#include "libavutil/opt.h"
+#include "video.h"
+
+static const char *const var_names[] = {
+ "in_w", "iw",
+ "in_h", "ih",
+ "z",
+ "zw",
+ "zh",
+ "n",
+ "t",
+ NULL
+};
+
+enum var_name {
+ VAR_IN_W, VAR_IW,
+ VAR_IN_H, VAR_IH,
+ VAR_Z,
+ VAR_ZW,
+ VAR_ZH,
+ VAR_N,
+ VAR_T,
+ VARS_NB
+};
+
+typedef struct YAZFContext {
+ const AVClass *class;
+ char *x_expr_str, *y_expr_str, *w_expr_str, *h_expr_str, *zoom_expr_str;
+ AVExpr *x_expr, *y_expr, *w_expr, *h_expr, *zoom_expr;
+ double var_values[VARS_NB];
+} YAZFContext;
+
+typedef struct ThreadData {
+ AVFrame *in, *out;
+ float crop_h, crop_x, crop_y, crop_w;
+ int w, h;
+} ThreadData;
+
+static av_cold int init(AVFilterContext *ctx)
+{
+ YAZFContext *s = ctx->priv;
+ int ret;
+
+ ret = av_expr_parse(&s->x_expr, s->x_expr_str, var_names, NULL, NULL, NULL, NULL, 0, ctx);
+ if (ret < 0)
+ return ret;
+
+ ret = av_expr_parse(&s->y_expr, s->y_expr_str, var_names, NULL, NULL, NULL, NULL, 0, ctx);
+ if (ret < 0)
+ return ret;
+
+ ret = av_expr_parse(&s->w_expr, s->w_expr_str, var_names, NULL, NULL, NULL, NULL, 0, ctx);
+ if (ret < 0)
+ return ret;
+
+ ret = av_expr_parse(&s->h_expr, s->h_expr_str, var_names, NULL, NULL, NULL, NULL, 0, ctx);
+ if (ret < 0)
+ return ret;
+
+ ret = av_expr_parse(&s->zoom_expr, s->zoom_expr_str, var_names, NULL, NULL, NULL, NULL, 0, ctx);
+ if (ret < 0)
+ return ret;
+
+ return 0;
+}
+
+static int config_props(AVFilterLink *outlink)
+{
+ AVFilterContext *ctx = outlink->src;
+ AVFilterLink *inlink = ctx->inputs[0];
+ YAZFContext *s = ctx->priv;
+
+ s->var_values[VAR_IN_W] = s->var_values[VAR_IW] = inlink->w;
+ s->var_values[VAR_IN_H] = s->var_values[VAR_IH] = inlink->h;
+
+ outlink->w = FFMAX(av_expr_eval(s->w_expr, s->var_values, NULL), 1);
+ outlink->h = FFMAX(av_expr_eval(s->h_expr, s->var_values, NULL), 1);
+ return 0;
+}
+
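+/* Bilinearly interpolate the source plane at sub-pixel position (x, y):
+ * the four neighbouring samples are fetched with coordinates clamped to the
+ * plane borders and blended with the fractional weights fx/fy. */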
+static inline uint8_t zoompan_pixel(const uint8_t *src, const int src_stride,
+ const int src_w, const int src_h,
+ const float x, const float y)
+{
+ int x0 = (int)floorf(x);
+ int y0 = (int)floorf(y);
+ int x1 = x0 + 1;
+ int y1 = y0 + 1;
+
+ float fx = x - x0;
+ float fy = y - y0;
+
+ x0 = FFMAX(0, FFMIN(x0, src_w - 1));
+ x1 = FFMAX(0, FFMIN(x1, src_w - 1));
+ y0 = FFMAX(0, FFMIN(y0, src_h - 1));
+ y1 = FFMAX(0, FFMIN(y1, src_h - 1));
+
+ float p00 = src[y0 * src_stride + x0];
+ float p10 = src[y0 * src_stride + x1];
+ float p01 = src[y1 * src_stride + x0];
+ float p11 = src[y1 * src_stride + x1];
+
+ return (1 - fx) * (1 - fy) * p00 +
+ fx * (1 - fy) * p10 +
+ (1 - fx) * fy * p01 +
+ fx * fy * p11;
+}
+
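+/* Fill destination rows [dst_y_start, dst_y_end) of one plane: each output
+ * pixel is mapped back into the crop window (crop_x, crop_y, crop_w, crop_h)
+ * of the source plane and sampled bilinearly. */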
+static void zoompan_plane(const uint8_t *src, const int src_stride,
+ const int src_w, const int src_h, const float crop_x,
+ const float crop_y, const float crop_w,
+ const float crop_h, uint8_t *dst,
+ const int dst_stride, const int dst_w, const int dst_h,
+ const int dst_y_start, const int dst_y_end)
+{
+ float u, v, x, y, val;
+ for (int dst_y = dst_y_start; dst_y < dst_y_end; dst_y++) {
+ for (int dst_x = 0; dst_x < dst_w; dst_x++) {
+ u = (dst_w > 1) ? (float)dst_x / (float)(dst_w - 1) : 0.0f;
+ v = (dst_h > 1) ? (float)dst_y / (float)(dst_h - 1) : 0.0f;
+
+ x = crop_x + u * crop_w;
+ y = crop_y + v * crop_h;
+
+ val = zoompan_pixel(src, src_stride, src_w, src_h, x, y);
+
+ dst[dst_y * dst_stride + dst_x] = FFMIN(FFMAX(val, 0), 255);
+ }
+ }
+}
+
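+/* Slice worker: derive the per-plane chroma subsampling factors from the
+ * input pixel format, then resample every plane for this slice's rows. */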
+static int zoompan_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
+{
+ AVFilterLink *inlink = ctx->inputs[0];
+ ThreadData *td = arg;
+
+ const int slice_start = (td->out->height * jobnr) / nb_jobs;
+ const int slice_end = (td->out->height * (jobnr+1)) / nb_jobs;
+
+ int nb_planes = 4;
+ struct {
+ int resolution_w;
+ int resolution_h;
+ } planes[4] = {
+ {.resolution_h = 1, .resolution_w = 1},
+ {.resolution_h = 1, .resolution_w = 1},
+ {.resolution_h = 1, .resolution_w = 1},
+ {.resolution_h = 1, .resolution_w = 1},
+ };
+ switch (inlink->format) {
+ case AV_PIX_FMT_YUV410P:
+ nb_planes = 3;
+ planes[1].resolution_h = 4;
+ planes[1].resolution_w = 4;
+ planes[2].resolution_h = 4;
+ planes[2].resolution_w = 4;
+ break;
+ case AV_PIX_FMT_YUV411P:
+ case AV_PIX_FMT_YUVJ411P:
+ nb_planes = 3;
+ planes[1].resolution_w = 4;
+ planes[2].resolution_w = 4;
+ break;
+ case AV_PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUVJ420P:
+ nb_planes = 3;
+ planes[1].resolution_h = 2;
+ planes[1].resolution_w = 2;
+ planes[2].resolution_h = 2;
+ planes[2].resolution_w = 2;
+ break;
+ case AV_PIX_FMT_YUVA420P:
+ nb_planes = 4;
+ planes[1].resolution_h = 2;
+ planes[1].resolution_w = 2;
+ planes[2].resolution_h = 2;
+ planes[2].resolution_w = 2;
+ break;
+ case AV_PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUVJ422P:
+ nb_planes = 3;
+ planes[1].resolution_w = 2;
+ planes[2].resolution_w = 2;
+ break;
+ case AV_PIX_FMT_YUVA422P:
+ nb_planes = 4;
+ planes[1].resolution_w = 2;
+ planes[2].resolution_w = 2;
+ break;
+ case AV_PIX_FMT_YUV440P:
+ case AV_PIX_FMT_YUVJ440P:
+ nb_planes = 3;
+ planes[1].resolution_h = 2;
+ planes[2].resolution_h = 2;
+ break;
+ case AV_PIX_FMT_YUV444P:
+ case AV_PIX_FMT_YUVJ444P:
+ nb_planes = 3;
+ break;
+ case AV_PIX_FMT_YUVA444P:
+ nb_planes = 4;
+ break;
+ }
+
+ for (int i = 0; i < nb_planes; i++) {
+ zoompan_plane(td->in->data[i], td->in->linesize[i], td->in->width/planes[i].resolution_w,
+ td->in->height/planes[i].resolution_h, td->crop_x/planes[i].resolution_w,
+ td->crop_y/planes[i].resolution_h, td->crop_w/planes[i].resolution_w,
+ td->crop_h/planes[i].resolution_h, td->out->data[i], td->out->linesize[i],
+ td->out->width/planes[i].resolution_w, td->out->height/planes[i].resolution_h,
+ slice_start/planes[i].resolution_h, slice_end/planes[i].resolution_h);
+ }
+ return 0;
+}
+
+static int filter_frame(AVFilterLink *inlink, AVFrame *in)
+{
+ AVFilterContext *ctx = inlink->dst;
+ YAZFContext *s = ctx->priv;
+ AVFilterLink *outlink = ctx->outputs[0];
+ int ret;
+ AVFrame *out = NULL;
+ ThreadData td;
+ float zoom, a;
+
+ inlink->w = in->width;
+ inlink->h = in->height;
+ s->var_values[VAR_N] = ff_filter_link(inlink)->frame_count_out;
+ s->var_values[VAR_T] = TS2T(in->pts, inlink->time_base);
+
+ if ((ret = config_props(outlink)) < 0)
+ goto err;
+
+ td.w = outlink->w;
+ td.h = outlink->h;
+ a = (float)outlink->w / (float)outlink->h;
+
+ s->var_values[VAR_Z] = zoom = av_clipd(av_expr_eval(s->zoom_expr, s->var_values, NULL), 1, 10);
+
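+ /* Derive the crop window: its width follows the zoom factor and its height
+ * the output aspect ratio; if that would exceed the input height, clamp the
+ * height and recompute the width. */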
+ td.crop_w = (float)inlink->w / zoom;
+ td.crop_h = td.crop_w / a;
+ if (td.crop_h > inlink->h) {
+ td.crop_h = inlink->h;
+ td.crop_w = td.crop_h * a;
+ }
+ s->var_values[VAR_ZW] = td.crop_w;
+ s->var_values[VAR_ZH] = td.crop_h;
+
+ td.crop_x = av_clipd(av_expr_eval(s->x_expr, s->var_values, NULL), 0, FFMAX(inlink->w - td.crop_w, 0));
+ td.crop_y = av_clipd(av_expr_eval(s->y_expr, s->var_values, NULL), 0, FFMAX(inlink->h - td.crop_h, 0));
+
+ out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
+ if (!out) {
+ ret = AVERROR(ENOMEM);
+ goto err;
+ }
+
+ if ((ret = av_frame_copy_props(out, in)) < 0)
+ goto err;
+
+ td.out = out, td.in = in;
+ if ((ret = ff_filter_execute(ctx, zoompan_slice, &td, NULL,
+ FFMIN(td.h, ff_filter_get_nb_threads(ctx)))) < 0)
+ goto err;
+
+ av_frame_free(&in);
+ return ff_filter_frame(outlink, out);
+
+err:
+ av_frame_free(&in);
+ av_frame_free(&out);
+ return ret;
+}
+
+static const enum AVPixelFormat pix_fmts[] = {
+ AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
+ AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
+ AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
+ AV_PIX_FMT_YUVJ411P, AV_PIX_FMT_YUVJ420P,
+ AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ440P,
+ AV_PIX_FMT_YUVJ444P,
+ AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P,
+ AV_PIX_FMT_YUVA444P,
+ AV_PIX_FMT_NONE
+};
+
+static av_cold void uninit(AVFilterContext *ctx)
+{
+ YAZFContext *s = ctx->priv;
+
+ av_expr_free(s->x_expr);
+ av_expr_free(s->y_expr);
+ av_expr_free(s->zoom_expr);
+ av_expr_free(s->w_expr);
+ av_expr_free(s->h_expr);
+}
+
+#define OFFSET(x) offsetof(YAZFContext, x)
+#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
+
+static const AVOption yazf_options[] = {
+ { "z", "set the zoom expression", OFFSET(zoom_expr_str), AV_OPT_TYPE_STRING, {.str = "1" }, .flags = FLAGS },
+ { "x", "set the zoom x expression", OFFSET(x_expr_str), AV_OPT_TYPE_STRING, {.str = "0" }, .flags = FLAGS },
+ { "y", "set the zoom y expression", OFFSET(y_expr_str), AV_OPT_TYPE_STRING, {.str = "0" }, .flags = FLAGS },
+ { "w", "set the output w expression", OFFSET(w_expr_str), AV_OPT_TYPE_STRING, {.str = "1" }, .flags = FLAGS },
+ { "h", "set the output h expression", OFFSET(h_expr_str), AV_OPT_TYPE_STRING, {.str = "1" }, .flags = FLAGS },
+ { NULL }
+};
+
+AVFILTER_DEFINE_CLASS(yazf);
+
+static const AVFilterPad avfilter_vf_yazf_inputs[] = {
+ {
+ .name = "default",
+ .type = AVMEDIA_TYPE_VIDEO,
+ .filter_frame = filter_frame,
+ },
+};
+
+static const AVFilterPad avfilter_vf_yazf_outputs[] = {
+ {
+ .name = "default",
+ .type = AVMEDIA_TYPE_VIDEO,
+ .config_props = config_props,
+ },
+};
+
+const FFFilter ff_vf_yazf = {
+ .p.name = "yazf",
+ .p.description = NULL_IF_CONFIG_SMALL("Apply Zoom & Pan effect with floating point precision."),
+ .p.priv_class = &yazf_class,
+ .p.flags = AVFILTER_FLAG_SLICE_THREADS,
+ .init = init,
+ .priv_size = sizeof(YAZFContext),
+ .uninit = uninit,
+ FILTER_INPUTS(avfilter_vf_yazf_inputs),
+ FILTER_OUTPUTS(avfilter_vf_yazf_outputs),
+ FILTER_PIXFMTS_ARRAY(pix_fmts),
+};
--
2.39.1
* Re: [FFmpeg-devel] [PATCH] avfilter: add vf_yazf filter
2025-05-31 14:49 [FFmpeg-devel] [PATCH] avfilter: add vf_yazf filter Quentin RENARD
@ 2025-06-01 8:21 ` Gyan Doshi
2025-06-01 22:13 ` Michael Niedermayer
0 siblings, 1 reply; 5+ messages in thread
From: Gyan Doshi @ 2025-06-01 8:21 UTC (permalink / raw)
To: ffmpeg-devel
On 2025-05-31 08:19 pm, Quentin RENARD wrote:
> zoompan filter with floating point precision
If this filter is meant to be the same as the existing zoompan, just more precise,
then you should modify the original filter with a mode option for FP use.
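Roughly, such a mode option could look like this on zoompan's side (a sketch only;
the identifiers OFFSET(precision), PRECISION_INT, PRECISION_FLOAT and NB_PRECISIONS
are hypothetical and do not exist in the current zoompan code):

    { "precision", "set processing precision", OFFSET(precision), AV_OPT_TYPE_INT,
        {.i64 = PRECISION_INT}, 0, NB_PRECISIONS - 1, FLAGS, .unit = "precision" },
        { "int",   "integer arithmetic (current behaviour)", 0, AV_OPT_TYPE_CONST,
            {.i64 = PRECISION_INT},   0, 0, FLAGS, .unit = "precision" },
        { "float", "floating-point arithmetic",              0, AV_OPT_TYPE_CONST,
            {.i64 = PRECISION_FLOAT}, 0, 0, FLAGS, .unit = "precision" },

The filtering code would then branch on that value between the existing integer
path and a new floating-point path.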
Regards,
Gyan
[...]
* Re: [FFmpeg-devel] [PATCH] avfilter: add vf_yazf filter
2025-06-01 8:21 ` Gyan Doshi
@ 2025-06-01 22:13 ` Michael Niedermayer
2025-06-02 9:25 ` Quentin RENARD
0 siblings, 1 reply; 5+ messages in thread
From: Michael Niedermayer @ 2025-06-01 22:13 UTC (permalink / raw)
To: FFmpeg development discussions and patches
On Sun, Jun 01, 2025 at 01:51:05PM +0530, Gyan Doshi wrote:
>
>
> On 2025-05-31 08:19 pm, Quentin RENARD wrote:
> > zoompan filter with floating point precision
>
> If this filter is meant to be same as existing zoompan but more precise,
> then you should modify the original filter with a mode option for FP use.
I would suggest to add a 2nd FFFilter entry to
libavfilter/vf_perspective.c
with a zoompan user interface to access the already existing interpolation code
in vf_perspective.c
we have bilinear and bicubic interpolation there and it should form a
better basis for the implementation
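For reference, a second filter entry living in the same file would essentially be a
second exported FFFilter table reusing the shared private context, roughly like this
sketch (all names are placeholders, not a concrete proposal), plus the usual Makefile
and allfilters.c entries:

    const FFFilter ff_vf_zoompan2 = {
        .p.name        = "zoompan2",                 /* placeholder name        */
        .p.description = NULL_IF_CONFIG_SMALL("Zoom & pan via the perspective interpolation code."),
        .p.priv_class  = &zoompan2_class,            /* its own options table   */
        .priv_size     = sizeof(PerspectiveContext), /* shared private context  */
        .init          = zoompan2_init,              /* maps zoompan-style opts */
        .uninit        = zoompan2_uninit,            /* to perspective corners  */
        FILTER_INPUTS(zoompan2_inputs),
        FILTER_OUTPUTS(zoompan2_outputs),
        FILTER_PIXFMTS_ARRAY(zoompan2_pix_fmts),
    };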
thx
[...]
--
Michael GnuPG fingerprint: 9FF2128B147EF6730BADF133611EC787040B0FAB
What is kyc? Its a tool that makes you give out your real ID, while criminals
give out a forged ID card.
* Re: [FFmpeg-devel] [PATCH] avfilter: add vf_yazf filter
2025-06-01 22:13 ` Michael Niedermayer
@ 2025-06-02 9:25 ` Quentin RENARD
2025-06-04 1:11 ` Michael Niedermayer
0 siblings, 1 reply; 5+ messages in thread
From: Quentin RENARD @ 2025-06-02 9:25 UTC (permalink / raw)
To: FFmpeg development discussions and patches
> If this filter is meant to be the same as the existing zoompan, just more precise,
> then you should modify the original filter with a mode option for FP use.
The thing is, I hesitated to modify the existing zoompan because there are a few things that don’t fit my use case, and I didn’t want to update too many things:
- frame’s PTS is rewritten
- the configurable output size is not dynamic (and is not evaluated in frame mode)
- if the output aspect ratio is different from the input aspect ratio, the output image will be stretched
I can:
- add an option to enable PTS passthrough
- add “w” and “h” options on top of the “s” option that would allow a dynamic output size
- make sure the computed crop size respects the output aspect ratio
Would that be ok?
> I would suggest to add a 2nd FFFilter entry to
> libavfilter/vf_perspective.c
>
> with a zoompan user interface to access the already existing interpolation code
> in vf_perspective.c
>
> we have bilinear and bicubic interpolation there and it should form a
> better basis for the implementation
Reusing the vf_perspective.c interpolation implementations seems like a really great idea; I didn’t know they existed.
However, I don’t understand the way you suggest I should do it.
Are you suggesting I should add “const FFFilter ff_vf_yazp = {}” in “vf_perspective.c” and move the zoompan logic there? If not, maybe what you’re suggesting has already been implemented in another filter, and I could use it as an example to understand and reproduce it better?
Cheers
Quentin
* Re: [FFmpeg-devel] [PATCH] avfilter: add vf_yazf filter
2025-06-02 9:25 ` Quentin RENARD
@ 2025-06-04 1:11 ` Michael Niedermayer
0 siblings, 0 replies; 5+ messages in thread
From: Michael Niedermayer @ 2025-06-04 1:11 UTC (permalink / raw)
To: FFmpeg development discussions and patches
On Mon, Jun 02, 2025 at 11:25:43AM +0200, Quentin RENARD wrote:
> > If this filter is meant to be the same as the existing zoompan, just more precise,
> > then you should modify the original filter with a mode option for FP use.
>
> The thing is, I hesitated to modify the existing zoompan because there are a few things that don’t fit my use case, and I didn’t want to update too many things:
>
> - frame’s PTS is rewritten
> - the configurable output size is not dynamic (and is not evaluated in frame mode)
> - if the output aspect ratio is different from the input aspect ratio, the output image will be stretched
>
> I can:
>
> - add an option to enable PTS passthrough
> - add “w” and “h” options on top of the “s” option that would allow a dynamic output size
> - make sure the computed crop size respects the output aspect ratio
>
> Would that be ok?
>
> > I would suggest to add a 2nd FFFilter entry to
> > libavfilter/vf_perspective.c
> >
> > with a zoompan user interface to access the already existing interpolation code
> > in vf_perspective.c
> >
> > we have bilinear and bicubic interpolation there and it should form a
> > better basis for the implementation
>
> Reusing the vf_perspective.c interpolation implementations seems like a really great idea; I didn’t know they existed.
>
> However, I don’t understand the way you suggest I should do it.
>
> Are you suggesting I should add “const FFFilter ff_vf_yazp = {}” in “vf_perspective.c” and move the zoompan logic there? If not, maybe what you’re suggesting has already been implemented in another filter, and I could use it as an example to understand and reproduce it better?
I guess what's best would be to factor the interpolation code out of
vf_perspective.c into a new file as patch #1,
and then use that newly factored-out code in a new filter that has a
zoompan-style user interface as patch #2
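As a rough sketch of what patch #1 could expose (file name and signatures below are
placeholders, not an agreed API):

    /* libavfilter/interp_common.h -- placeholder file name for patch #1 */
    #ifndef AVFILTER_INTERP_COMMON_H
    #define AVFILTER_INTERP_COMMON_H

    #include <stdint.h>

    /* Sample one 8-bit plane at a sub-pixel position (16.16 fixed point),
     * using the bilinear/bicubic kernels currently in vf_perspective.c
     * (signatures are illustrative only). */
    int ff_interp_bilinear8(const uint8_t *src, int linesize, int w, int h, int x, int y);
    int ff_interp_bicubic8 (const uint8_t *src, int linesize, int w, int h, int x, int y);

    #endif /* AVFILTER_INTERP_COMMON_H */

Patch #2 would then be a small filter that translates zoompan-style expressions into
per-pixel source coordinates and calls these helpers.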
thx
[...]
--
Michael GnuPG fingerprint: 9FF2128B147EF6730BADF133611EC787040B0FAB
Good people do not need laws to tell them to act responsibly, while bad
people will find a way around the laws. -- Plato