FfmpegWriter.cpp
/*
 * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT)
 * SPDX-FileCopyrightText: 2006-2010 RobotCub Consortium
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * Most of this file is from the output_example.c of ffmpeg -
 * copyright/copypolicy statement follows --
 */

/*
 * Libavformat API example: Output a media file in any supported
 * libavformat format. The default codecs are used.
 *
 * SPDX-FileCopyrightText: 2003 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

38#include "FfmpegWriter.h"
39#include "ffmpeg_api.h"
40
41#include <yarp/os/all.h>
42#include <yarp/sig/all.h>
43#include <yarp/os/Log.h>
45
#include <cstdlib>
#include <cstring>
#include <cmath>

#ifndef M_PI
#define M_PI 3.1415926535897931
#endif

#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
#define STREAM_PIX_WORK AV_PIX_FMT_RGB24

using namespace yarp::os;
using namespace yarp::dev;
using namespace yarp::sig;
using namespace yarp::sig::file;

namespace {
YARP_LOG_COMPONENT(FFMPEGWRITER, "yarp.device.ffmpeg_writer")
}


/**************************************************************/
/* audio output */

float t, tincr, tincr2;

uint8_t *audio_outbuf;
int audio_outbuf_size;
int audio_input_frame_size;

int16_t *samples;
int samples_size;
int samples_at;
int samples_channels;

/*
 * add an audio output stream
 */
static AVStream *add_audio_stream(AVFormatContext *oc, AVCodecID codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = avformat_new_stream(oc, nullptr);
    if (!st) {
        yCFatal(FFMPEGWRITER, "Could not alloc stream");
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = AVMEDIA_TYPE_AUDIO;

    /* put sample parameters */
    c->bit_rate = 64000;
    c->sample_rate = 44100;
    c->channels = 2;
    return st;
}
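/* Note: the 64 kbit/s bit rate, 44100 Hz sample rate and 2 channels set
   above are only the defaults inherited from the ffmpeg example; when the
   device is opened with audio enabled, delayedOpen() below overwrites
   sample_rate and channels with the values taken from the configuration. */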
static void open_audio(AVFormatContext *oc, AVStream *st)
{
    yCInfo(FFMPEGWRITER, "Opening audio stream");
    AVCodecContext *c;
    AVCodec *codec;

    c = st->codec;

    /* find the audio encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        yCFatal(FFMPEGWRITER, "Audio codec not found");
    }

    /* open it */
    if (avcodec_open2(c, codec, nullptr) < 0) {
        yCFatal(FFMPEGWRITER, "Could not open codec");
    }

    /* init signal generator */
    t = 0;
    tincr = 2 * M_PI * 110.0 / c->sample_rate;
    /* increment frequency by 110 Hz per second */
    tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;

    audio_outbuf_size = 10000;
    audio_outbuf = (uint8_t*)av_malloc(audio_outbuf_size);

    /* ugly hack for PCM codecs (will be removed ASAP with new PCM
       support) to compute the input frame size in samples */
    if (c->frame_size <= 1) {
        audio_input_frame_size = audio_outbuf_size / c->channels;
        switch(st->codec->codec_id) {
        case AV_CODEC_ID_PCM_S16LE:
        case AV_CODEC_ID_PCM_S16BE:
        case AV_CODEC_ID_PCM_U16LE:
        case AV_CODEC_ID_PCM_U16BE:
            audio_input_frame_size >>= 1;
            break;
        default:
            break;
        }
    } else {
        audio_input_frame_size = c->frame_size;
    }
    samples_size = audio_input_frame_size;
    samples_at = 0;
    samples_channels = c->channels;
    samples = (int16_t*)av_malloc(samples_size * 2 * samples_channels);

    yCInfo(FFMPEGWRITER,
           "FRAME SIZE is %d / samples size is %d\n",
           c->frame_size,
           samples_size);
}

/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
   'nb_channels' channels */
static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
    int j, i, v;
    int16_t *q;

    q = samples;
    for(j=0;j<frame_size;j++) {
        v = (int)(sin(t) * 10000);
        for (i = 0; i < nb_channels; i++) {
            *q++ = v;
        }
        t += tincr;
        tincr += tincr2;
    }
}
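/* A rough sketch of the arithmetic above: each sample advances the phase t
   by tincr, and tincr itself grows by tincr2 per sample.  Since
   tincr = 2*pi*110/sample_rate the dummy tone starts at 110 Hz, and because
   tincr2 = tincr/sample_rate the frequency rises by about 110 Hz for every
   second of generated audio (e.g. at 44100 Hz, after 44100 samples tincr has
   grown to roughly 2*pi*220/44100, i.e. a 220 Hz tone). */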

static void make_audio_frame(AVCodecContext *c, AVFrame *&frame,
                             void *&samples) {
    frame = av_frame_alloc();
    if (!frame) {
        yCFatal(FFMPEGWRITER, "Could not allocate audio frame");
    }
    frame->nb_samples = c->frame_size;
    frame->format = c->sample_fmt;
    frame->channel_layout = c->channel_layout;
    int buffer_size = av_samples_get_buffer_size(nullptr, c->channels,
                                                 c->frame_size,
                                                 c->sample_fmt, 0);
    if (buffer_size < 0) {
        yCError(FFMPEGWRITER, "Could not get sample buffer size");
    }
    samples = av_malloc(buffer_size);
    if (!samples) {
        yCFatal(FFMPEGWRITER,
                "Could not allocate %d bytes for samples buffer",
                buffer_size);
    }
    /* setup the data pointers in the AVFrame */
    int ret = avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt,
                                       (const uint8_t*)samples, buffer_size, 0);
    if (ret < 0) {
        yCFatal(FFMPEGWRITER, "Could not setup audio frame");
    }
}
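/* Sizing sketch (assuming a 16-bit interleaved sample format): the buffer
   described to the AVFrame above holds frame_size * channels samples of
   2 bytes each, so av_samples_get_buffer_size() returns roughly
   frame_size * channels * 2 bytes plus any alignment padding; that is the
   amount handed to av_malloc() for the samples pointer. */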

static void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVPacket pkt;
    av_init_packet(&pkt);

    c = st->codec;

    get_audio_frame(samples, audio_input_frame_size, c->channels);

    AVFrame *frame;
    void *samples;
    make_audio_frame(c, frame, samples);
    AVPacket tmp;
    int got_packet = 0;
    av_init_packet(&tmp);
    tmp.data = audio_outbuf;
    tmp.size = audio_outbuf_size;
    pkt.size = avcodec_encode_audio2(c, &tmp, frame, &got_packet);
    if (tmp.side_data_elems > 0) {
        for (int i = 0; i < tmp.side_data_elems; i++) {
            av_free(tmp.side_data[i].data);
        }
        av_freep(&tmp.side_data);
        tmp.side_data_elems = 0;
    }
    av_frame_free(&frame);
    av_freep(&samples);

    pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
    pkt.flags |= AV_PKT_FLAG_KEY;
    pkt.stream_index= st->index;
    pkt.data= audio_outbuf;

    /* write the compressed frame in the media file */
    if (av_write_frame(oc, &pkt) != 0) {
        yCFatal(FFMPEGWRITER, "Error while writing audio frame");
    } else {
        yCInfo(FFMPEGWRITER, "Wrote some audio");
    }
}

void FfmpegWriter::write_audio_frame(AVFormatContext *oc, AVStream *st, Sound& snd)
{
    yCInfo(FFMPEGWRITER, "Preparing to write audio (%d left over)", samples_at);
    AVCodecContext *c;
    int key = 1;

    c = st->codec;

    size_t at = 0;
    while (at<snd.getSamples()) {

        int avail = samples_size - samples_at;
        int remain = snd.getSamples() - at;
        int chan = snd.getChannels();
        if (remain<avail) { avail = remain; }
        for (int i=0; i<avail; i++) {
            int offset = samples_at*samples_channels;
            for (int j=0; j<samples_channels; j++) {
                samples[offset+j] = snd.get(at,j%chan);
            }
            samples_at++;
            at++;
        }
        avail = samples_size - samples_at;

        if (avail==0) {
            AVPacket pkt;
            av_init_packet(&pkt);

            AVFrame *frame;
            void *samples;
            make_audio_frame(c, frame, samples);
            AVPacket tmp;
            int got_packet = 0;
            av_init_packet(&tmp);
            tmp.data = audio_outbuf;
            tmp.size = audio_outbuf_size;
            pkt.size = avcodec_encode_audio2(c, &tmp, frame, &got_packet);
            if (tmp.side_data_elems > 0) {
                for (int i = 0; i < tmp.side_data_elems; i++) {
                    av_free(tmp.side_data[i].data);
                }
                av_freep(&tmp.side_data);
                tmp.side_data_elems = 0;
            }
            av_frame_free(&frame);
            av_freep(&samples);

            pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base,
                                  st->time_base);
            pkt.dts = pkt.pts;
            yCTrace(FFMPEGWRITER, "(%d)", pkt.size);
            if (key) {
                pkt.flags |= AV_PKT_FLAG_KEY;
                key = 0;
            }
            pkt.stream_index= st->index;
            pkt.data = audio_outbuf;
            pkt.duration = 0;

            /* write the compressed frame in the media file */
            if (av_write_frame(oc, &pkt) != 0) {
                yCFatal(FFMPEGWRITER, "Error while writing audio frame");
            }
            samples_at = 0;
        }
    }
    yCInfo(FFMPEGWRITER, " wrote audio\n");
}

static void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);

    av_free(samples);
    av_free(audio_outbuf);
}

/**************************************************************/
/* video output */


/* add a video output stream */
static AVStream *add_video_stream(AVFormatContext *oc, AVCodecID codec_id,
                                  int w, int h, int framerate)
{
    AVCodecContext *c;
    AVStream *st;

    st = avformat_new_stream(oc, nullptr);
    if (!st) {
        yCFatal(FFMPEGWRITER, "Could not alloc stream");
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = AVMEDIA_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = w;
    c->height = h;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
    c->time_base.den = framerate;
    c->time_base.num = 1;
    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt = STREAM_PIX_FMT;
    if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO){
        /* needed to avoid using macroblocks in which some coeffs overflow
           this doesn't happen with normal video, it just happens here as the
           motion of the chroma plane doesn't match the luma plane */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if (!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp")) {
        c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }


    return st;
}
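/* Timing sketch: with time_base = 1/framerate each encoded frame advances the
   codec clock by one tick, and av_rescale_q(pts, c->time_base, st->time_base)
   later converts that tick count into the stream's own time base when packets
   are written.  For example, with framerate = 25 and a stream time base of
   1/90000, frame number 50 maps to 50 * 90000 / 25 = 180000. */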

static AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = av_frame_alloc();
    if (!picture) {
        return nullptr;
    }
    size = avpicture_get_size((AVPixelFormat)pix_fmt, width, height);
    picture_buf = (uint8_t*)av_malloc(size);
    if (!picture_buf) {
        av_free(picture);
        return nullptr;
    }
    avpicture_fill((AVPicture *)picture, picture_buf,
                   (AVPixelFormat)pix_fmt, width, height);
    return picture;
}

void FfmpegWriter::open_video(AVFormatContext *oc, AVStream *st)
{
    yCInfo(FFMPEGWRITER, "Opening video stream");
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        yCFatal(FFMPEGWRITER, "Video codec not found");
    }

    /* open the codec */
    if (avcodec_open2(c, codec, nullptr) < 0) {
        yCFatal(FFMPEGWRITER, "Could not open codec");
    }

    video_outbuf = nullptr;
    /* allocate output buffer */
    /* XXX: API change will be done */
    /* buffers passed into lav* can be allocated any way you prefer,
       as long as they're aligned enough for the architecture, and
       they're freed appropriately (such as using av_free for buffers
       allocated with av_malloc) */
    video_outbuf_size = 200000;
    video_outbuf = (uint8_t*)av_malloc(video_outbuf_size);

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        yCFatal(FFMPEGWRITER, "Could not allocate picture");
    }

    /* if the codec's pixel format is not RGB24, then a temporary RGB24
       working picture is needed too; it is filled from the YARP image and
       then converted to the required output format */
    tmp_picture = nullptr;
    if (c->pix_fmt != AV_PIX_FMT_RGB24) {
        tmp_picture = alloc_picture(AV_PIX_FMT_RGB24, c->width, c->height);
        if (!tmp_picture) {
            yCFatal(FFMPEGWRITER, "Could not allocate temporary picture");
        }
    }
}

static void fill_rgb_image(AVFrame *pict, int frame_index, int width,
                           int height, ImageOf<PixelRgb>& img)
{
    int x, y;

    for(y=0;y<height;y++) {
        for(x=0;x<width;x++) {
            int base = y*(width*3);
            pict->data[0][base + x*3] = img.safePixel(x,y).r;
            pict->data[0][base + x*3+1] = img.safePixel(x,y).g;
            pict->data[0][base + x*3+2] = img.safePixel(x,y).b;
        }
    }
}
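/* Layout sketch: data[0] of an RGB24 frame is a packed byte array with three
   bytes per pixel in R,G,B order, so pixel (x,y) starts at byte
   y*width*3 + x*3 (e.g. for a 320-pixel-wide image, pixel (10,2) starts at
   2*960 + 30 = 1950).  This indexing assumes the frame's linesize equals
   width*3, i.e. no extra row padding, which is how alloc_picture() lays the
   buffer out via avpicture_fill(). */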


void FfmpegWriter::write_video_frame(AVFormatContext *oc, AVStream *st,
                                     ImageOf<PixelRgb>& img)
{
    int out_size, ret;
    AVCodecContext *c;

    c = st->codec;

    if (c->pix_fmt != AV_PIX_FMT_RGB24) {
        fill_rgb_image(tmp_picture, frame_count, c->width, c->height, img);
        stable_img_convert((AVPicture *)picture, c->pix_fmt,
                           (AVPicture *)tmp_picture, AV_PIX_FMT_RGB24,
                           c->width, c->height);
    } else {
        fill_rgb_image(picture, frame_count, c->width, c->height, img);
    }

    /* encode the image */
    AVPacket tmp;
    int got_packet = 0;
    av_init_packet(&tmp);
    tmp.data = video_outbuf;
    tmp.size = video_outbuf_size;
    out_size = avcodec_encode_video2(c, &tmp, picture, &got_packet);
    if (tmp.side_data_elems > 0) {
        for (int i = 0; i < tmp.side_data_elems; i++) {
            av_free(tmp.side_data[i].data);
        }
        av_freep(&tmp.side_data);
        tmp.side_data_elems = 0;
    }
    /* if zero size, it means the image was buffered */
    if (out_size > 0) {
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
        if (c->coded_frame->key_frame) {
            pkt.flags |= AV_PKT_FLAG_KEY;
        }
        pkt.stream_index= st->index;
        pkt.data= video_outbuf;
        pkt.size= out_size;

        /*
        static int x = 0;
        yCInfo(FFMPEGWRITER,
               "%ld / %ld : %ld / %ld --> %d\n",
               (long int) c->time_base.num,
               (long int) c->time_base.den,
               (long int) st->time_base.num,
               (long int) st->time_base.den,
               x);
        pkt.pts = x;
        x++;
        */

        /* write the compressed frame in the media file */
        ret = av_write_frame(oc, &pkt);
    } else {
        ret = 0;
    }

    if (ret != 0) {
        yCFatal(FFMPEGWRITER, "Error while writing video frame");
    }
    frame_count++;
}

void FfmpegWriter::close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}




/**************************************************************/
/* YARP adaptation */

bool FfmpegWriter::open(yarp::os::Searchable & config) {
    yCInfo(FFMPEGWRITER,
           "ffmpeg libavcodec version number %d.%d.%d",
           LIBAVCODEC_VERSION_MAJOR,
           LIBAVCODEC_VERSION_MINOR,
           LIBAVCODEC_VERSION_MICRO);

    ready = false;
    savedConfig.fromString(config.toString());

    // open if possible, if not will do it later
    return delayedOpen(config);
}

bool FfmpegWriter::delayedOpen(yarp::os::Searchable & config) {
    yCTrace(FFMPEGWRITER, "DELAYED OPEN %s", config.toString().c_str());

    int w = config.check("width",Value(0),
                         "width of image (must be even)").asInt32();
    int h = config.check("height",Value(0),
                         "height of image (must be even)").asInt32();
    int framerate = config.check("framerate",Value(30),
                                 "baseline images per second").asInt32();

    int sample_rate = 0;
    int channels = 0;
    bool audio = config.check("audio","should audio be included");
    if (audio) {
        sample_rate = config.check("sample_rate",Value(44100),
                                   "audio samples per second").asInt32();
        channels = config.check("channels",Value(1),
                                "number of audio channels").asInt32();
    }

    filename = config.check("out",Value("movie.avi"),
                            "name of movie to write").asString();

    delayed = false;
    if (w<=0||h<=0) {
        delayed = true;
        return true;
    }
    ready = true;

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    /* auto detect the output format from the name. default is
       mpeg. */
    fmt = av_guess_format(nullptr, filename.c_str(), nullptr);
    if (!fmt) {
        yCInfo(FFMPEGWRITER, "Could not deduce output format from file extension: using MPEG.");
        fmt = av_guess_format("mpeg", nullptr, nullptr);
    }
    if (!fmt) {
        yCFatal(FFMPEGWRITER, "Could not find suitable output format");
    }

    /* allocate the output media context */
    oc = avformat_alloc_context();
    if (!oc) {
        yCFatal(FFMPEGWRITER, "Memory error");
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename.c_str());

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = nullptr;
    audio_st = nullptr;
    if (fmt->video_codec != AV_CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec, w, h, framerate);
    }

    if (audio) {
        yCInfo(FFMPEGWRITER, "Adding audio %dx%d", sample_rate, channels);
        if (fmt->audio_codec != AV_CODEC_ID_NONE) {
            audio_st = add_audio_stream(oc, fmt->audio_codec);
            if (audio_st!=nullptr) {
                AVCodecContext *c = audio_st->codec;
                c->sample_rate = sample_rate;
                c->channels = channels;
            } else {
                yCError(FFMPEGWRITER, "Failed to add audio");
            }
        } else {
            yCWarning(FFMPEGWRITER, "No audio codec available");
        }
    } else {
        yCInfo(FFMPEGWRITER, "Skipping audio");
    }

    av_dump_format(oc, 0, filename.c_str(), 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st) {
        open_video(oc, video_st);
    }
    if (audio_st) {
        open_audio(oc, audio_st);
    }

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, filename.c_str(), AVIO_FLAG_WRITE) < 0) {
            yCFatal(FFMPEGWRITER, "Could not open '%s'", filename.c_str());
        }
    }

    /* write the stream header, if any */
    avformat_write_header(oc, nullptr);

    return true;
}

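/* Usage sketch (assuming the plugin is registered under the name
   "ffmpeg_writer", as suggested by the log component above): the device could
   be instantiated from the command line with something like

       yarpdev --device ffmpeg_writer --out movie.avi --framerate 30

   Width and height may be omitted, in which case the open is delayed and the
   sizes are filled in from the first image handed to putImage(). */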
bool FfmpegWriter::close() {
    if (!isOk()) { return false; }

    /* close each codec */
    if (video_st) {
        close_video(oc, video_st);
    }
    if (audio_st) {
        close_audio(oc, audio_st);
    }

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for(unsigned int i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        avio_close(oc->pb);
    }

    /* free the stream */
    av_free(oc);

    yCInfo(FFMPEGWRITER, "Closed media file %s", filename.c_str());

    return true;
}

bool FfmpegWriter::putImage(ImageOf<PixelRgb>& image) {
    if (delayed) {
        savedConfig.put("width",Value((int)image.width()));
        savedConfig.put("height",Value((int)image.height()));
    }
    if (!isOk()) { return false; }

    /* compute current audio and video time */
    if (audio_st) {
        audio_pts = (double)av_stream_get_end_pts(audio_st) * audio_st->time_base.num / audio_st->time_base.den;
    } else {
        audio_pts = 0.0;
    }

    if (video_st) {
        video_pts = (double)av_stream_get_end_pts(video_st) * video_st->time_base.num / video_st->time_base.den;
    } else {
        video_pts = 0.0;
    }

    if (!(audio_st || video_st)) {
        return false;
    }

    /* write interleaved audio and video frames */
    if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
        write_audio_frame(oc, audio_st);
    } else {
        write_video_frame(oc, video_st, image);
    }

    return true;
}
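/* Scheduling sketch: audio_pts and video_pts above are the current end times
   of the two streams in seconds, so the branch simply feeds whichever stream
   is lagging.  For example, if the audio stream has reached 0.96 s while the
   video stream is at 1.00 s, audio_pts < video_pts and the pending audio is
   written before the next video frame. */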

bool FfmpegWriter::putAudioVisual(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image,
                                  yarp::sig::Sound& sound) {
    if (delayed) {
        savedConfig.put("width",Value((int)image.width()));
        savedConfig.put("height",Value((int)image.height()));
        savedConfig.put("sample_rate",Value((int)sound.getFrequency()));
        savedConfig.put("channels",Value((int)sound.getChannels()));
        savedConfig.put("audio",Value(1));
    }
    if (!isOk()) { return false; }

    /* write interleaved audio and video frames */
    write_video_frame(oc, video_st, image);
    write_audio_frame(oc, audio_st, sound);
    return true;
}