diff options
Diffstat (limited to 'ext')
93 files changed, 360 insertions, 446 deletions
diff --git a/ext/assrender/gstassrender.c b/ext/assrender/gstassrender.c index a3629bfd0..07e33d452 100644 --- a/ext/assrender/gstassrender.c +++ b/ext/assrender/gstassrender.c @@ -20,15 +20,15 @@ /** * SECTION:element-assrender + * @title: assrender * * Renders timestamped SSA/ASS subtitles on top of a video stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/bs2b/gstbs2b.c b/ext/bs2b/gstbs2b.c index dd29983a9..dc597087f 100644 --- a/ext/bs2b/gstbs2b.c +++ b/ext/bs2b/gstbs2b.c @@ -20,17 +20,17 @@ /** * SECTION:element-bs2b + * @title: bs2b * - * Improve headphone listening of stereo audio records using the bs2b library. + * Improve headphone listening of stereo audio records using the bs2b library. * It does so by mixing the left and right channel in a way that simulates * a stereo speaker setup while using headphones. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 audiotestsrc ! "audio/x-raw,channel-mask=(bitmask)0x1" ! interleave name=i ! bs2b ! autoaudiosink audiotestsrc freq=330 ! "audio/x-raw,channel-mask=(bitmask)0x2" ! i. * ]| Play two independent sine test sources and crossfeed them. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/chromaprint/gstchromaprint.c b/ext/chromaprint/gstchromaprint.c index 98b3ae392..6ebadd31a 100644 --- a/ext/chromaprint/gstchromaprint.c +++ b/ext/chromaprint/gstchromaprint.c @@ -23,18 +23,18 @@ /** * SECTION:element-chromaprint + * @title: chromaprint * * The chromaprint element calculates an acoustic fingerprint for an * audio stream which can be used to identify a song and look up * further metadata from the <ulink url="http://acoustid.org/">Acoustid</ulink> * and Musicbrainz databases. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m uridecodebin uri=file:///path/to/song.ogg ! audioconvert ! chromaprint ! fakesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlbasesink.c b/ext/curl/gstcurlbasesink.c index 40c61826d..375c46aef 100644 --- a/ext/curl/gstcurlbasesink.c +++ b/ext/curl/gstcurlbasesink.c @@ -19,14 +19,14 @@ /** * SECTION:element-curlsink + * @title: curlsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * a server (e.g. a HTTP/FTP server). * - * <refsect2> - * <title>Example launch line (upload a JPEG file to an HTTP server)</title> + * ## Example launch line (upload a JPEG file to an HTTP server) * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! 
curlsink \ * file-name=image.jpg \ @@ -35,7 +35,7 @@ * content-type=image/jpeg \ * use-content-length=false * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlfilesink.c b/ext/curl/gstcurlfilesink.c index b4aa1747b..519c135ac 100644 --- a/ext/curl/gstcurlfilesink.c +++ b/ext/curl/gstcurlfilesink.c @@ -19,21 +19,20 @@ /** * SECTION:element-curlfilesink + * @title: curlfilesink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * a local or network drive. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to /home/test/images - * directory)</title> + * ## Example launch line (upload a JPEG file to /home/test/images directory) * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlfilesink \ * file-name=image.jpg \ * location=file:///home/test/images/ * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlftpsink.c b/ext/curl/gstcurlftpsink.c index 92b5c4e08..55b6f6a9a 100644 --- a/ext/curl/gstcurlftpsink.c +++ b/ext/curl/gstcurlftpsink.c @@ -19,21 +19,23 @@ /** * SECTION:element-curlftpsink + * @title: curlftpsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * an FTP server. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to /home/test/images - * directory)</title> + * ## Example launch line + * + * Upload a JPEG file to /home/test/images * directory) + * * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! 
curlftpsink \ * file-name=image.jpg \ * location=ftp://192.168.0.1/images/ * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlhttpsink.c b/ext/curl/gstcurlhttpsink.c index a50a057e4..3df255574 100644 --- a/ext/curl/gstcurlhttpsink.c +++ b/ext/curl/gstcurlhttpsink.c @@ -19,14 +19,17 @@ /** * SECTION:element-curlhttpsink + * @title: curlhttpsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * an HTTP server. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to an HTTP server)</title> + * ## Example launch line + * + * Upload a JPEG file to an HTTP server. + * * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlhttpsink \ * file-name=image.jpg \ @@ -35,7 +38,6 @@ * content-type=image/jpeg \ * use-content-length=false * ]| - * </refsect2> */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlsftpsink.c b/ext/curl/gstcurlsftpsink.c index 939ebd8b9..c3549c81f 100644 --- a/ext/curl/gstcurlsftpsink.c +++ b/ext/curl/gstcurlsftpsink.c @@ -19,14 +19,17 @@ /** * SECTION:element-curlsftpsink + * @title: curlsftpsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * a SFTP (SSH File Transfer Protocol) server. * - * <refsect2> - * <title>Example launch line (upload a file to /home/john/sftp_tests/)</title> + * ## Example launch line + * + * Upload a file to /home/john/sftp_tests/ + * * |[ * gst-launch-1.0 filesrc location=/home/jdoe/some.file ! 
curlsftpsink \ * file-name=some.file.backup \ @@ -36,8 +39,6 @@ * ssh-priv-keyfile=/home/jdoe/.ssh/id_rsa \ * create-dirs=TRUE * ]| - * </refsect2> - * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlsmtpsink.c b/ext/curl/gstcurlsmtpsink.c index 07a147430..b9e9bf2d9 100644 --- a/ext/curl/gstcurlsmtpsink.c +++ b/ext/curl/gstcurlsmtpsink.c @@ -19,14 +19,17 @@ /** * SECTION:element-curlsink + * @title: curlsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * an SMTP server. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to an SMTP server)</title> + * ## Example launch line + * + * Upload a JPEG file to an SMTP server. + * * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsmtpsink \ * file-name=image.jpg \ @@ -38,7 +41,7 @@ * use-ssl=TRUE \ * insecure=TRUE * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlsshsink.c b/ext/curl/gstcurlsshsink.c index b06cbcd5b..cdbff086b 100644 --- a/ext/curl/gstcurlsshsink.c +++ b/ext/curl/gstcurlsshsink.c @@ -19,6 +19,7 @@ /** * SECTION:element-curlsshsink + * @title: curlsshsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * diff --git a/ext/curl/gstcurltlssink.c b/ext/curl/gstcurltlssink.c index 35c1c1c5a..94f9d6544 100644 --- a/ext/curl/gstcurltlssink.c +++ b/ext/curl/gstcurltlssink.c @@ -19,6 +19,7 @@ /** * SECTION:element-curltlssink + * @title: curltlssink * @short_description: sink that uploads data to a server using libcurl * @see_also: * diff --git a/ext/daala/gstdaaladec.c b/ext/daala/gstdaaladec.c index 90fb38f5d..fe3d300f6 100644 --- a/ext/daala/gstdaaladec.c +++ b/ext/daala/gstdaaladec.c @@ -23,6 +23,7 @@ /** * SECTION:element-daaladec + * @title: daaladec * @see_also: daalaenc, oggdemux * * This element decodes daala streams into raw video @@ -30,13 +31,12 @@ * video codec maintained by the <ulink 
url="http://www.xiph.org/">Xiph.org * Foundation</ulink>. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! daaladec ! xvimagesink * ]| This example pipeline will decode an ogg stream and decodes the daala video. Refer to * the daalaenc example to create the ogg file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/daala/gstdaalaenc.c b/ext/daala/gstdaalaenc.c index 7209ddfa3..909f14bc0 100644 --- a/ext/daala/gstdaalaenc.c +++ b/ext/daala/gstdaalaenc.c @@ -23,6 +23,7 @@ /** * SECTION:element-daalaenc + * @title: daalaenc * @see_also: daaladec, oggmux * * This element encodes raw video into a Daala stream. @@ -30,14 +31,13 @@ * video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org * Foundation</ulink>. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 -v videotestsrc num-buffers=1000 ! daalaenc ! oggmux ! filesink location=videotestsrc.ogg * ]| This example pipeline will encode a test video source to daala muxed in an * ogg container. Refer to the daaladec documentation to decode the create * stream. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/dash/gstdashdemux.c b/ext/dash/gstdashdemux.c index fe9106182..ddb32468c 100644 --- a/ext/dash/gstdashdemux.c +++ b/ext/dash/gstdashdemux.c @@ -29,9 +29,10 @@ */ /** * SECTION:element-dashdemux + * @title: dashdemux * * DASH demuxer element. 
- * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 playbin uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd" * ]| diff --git a/ext/dc1394/gstdc1394src.c b/ext/dc1394/gstdc1394src.c index 3ab3026d0..0c92f9231 100644 --- a/ext/dc1394/gstdc1394src.c +++ b/ext/dc1394/gstdc1394src.c @@ -20,6 +20,7 @@ /** * SECTION:element-dc1394src + * @title: dc1394src * * Source for IIDC (Instrumentation & Industrial Digital Camera) firewire * cameras. If several cameras are connected to the system, the desired one @@ -31,8 +32,7 @@ * corresponding video formats are exposed in the capabilities. * The Bayer pattern is left unspecified. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v dc1394src ! videoconvert ! autovideosink * ]| Capture and display frames from the first camera available in the system. @@ -41,7 +41,7 @@ * ! "video/x-bayer,format=gbrg,width=1280,height=960,framerate=15/2" \ * ! bayer2rgb ! videoconvert ! autovideosink * ]| Capture and display frames from a specific camera in the desired format. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/directfb/dfbvideosink.c b/ext/directfb/dfbvideosink.c index a6f433bfa..de3bf9163 100644 --- a/ext/directfb/dfbvideosink.c +++ b/ext/directfb/dfbvideosink.c @@ -20,66 +20,52 @@ /** * SECTION:element-dfbvideosink + * @title: dfbvideosink * * DfbVideoSink renders video frames using the * <ulink url="http://www.directfb.org/">DirectFB</ulink> library. * Rendering can happen in two different modes : - * <itemizedlist> - * <listitem> - * <para> - * Standalone: this mode will take complete control of the monitor forcing + * + * * Standalone: this mode will take complete control of the monitor forcing * <ulink url="http://www.directfb.org/">DirectFB</ulink> to fullscreen layout. 
* This is convenient to test using the gst-launch-1.0 command line tool or * other simple applications. It is possible to interrupt playback while * being in this mode by pressing the Escape key. - * </para> - * <para> * This mode handles navigation events for every input device supported by * the <ulink url="http://www.directfb.org/">DirectFB</ulink> library, it will * look for available video modes in the fb.modes file and try to switch - * the framebuffer video mode to the most suitable one. Depending on + * the framebuffer video mode to the most suitable one. Depending on * hardware acceleration capabilities the element will handle scaling or not. * If no acceleration is available it will do clipping or centering of the * video frames respecting the original aspect ratio. - * </para> - * </listitem> - * <listitem> - * <para> - * Embedded: this mode will render video frames in a + * + * * Embedded: this mode will render video frames in a * #GstDfbVideoSink:surface provided by the * application developer. This is a more advanced usage of the element and - * it is required to integrate video playback in existing + * it is required to integrate video playback in existing * <ulink url="http://www.directfb.org/">DirectFB</ulink> applications. - * </para> - * <para> * When using this mode the element just renders to the - * #GstDfbVideoSink:surface provided by the + * #GstDfbVideoSink:surface provided by the * application, that means it won't handle navigation events and won't resize * the #GstDfbVideoSink:surface to fit video * frames geometry. Application has to implement the necessary code to grab * informations about the negotiated geometry and resize there * #GstDfbVideoSink:surface accordingly. - * </para> - * </listitem> - * </itemizedlist> - * For both modes the element implements a buffer pool allocation system to - * optimize memory allocation time and handle reverse negotiation. 
Indeed if + * + * For both modes the element implements a buffer pool allocation system to + * optimize memory allocation time and handle reverse negotiation. Indeed if * you insert an element like videoscale in the pipeline the video sink will * negotiate with it to try get a scaled video for either the fullscreen layout * or the application provided external #GstDfbVideoSink:surface. * - * <refsect2> - * <title>Example application</title> - * <para> + * ## Example application + * * <include xmlns="http://www.w3.org/2003/XInclude" href="element-dfb-example.xml" /> - * </para> - * </refsect2> - * <refsect2> - * <title>Example pipelines</title> + * + * ## Example pipelines * |[ * gst-launch-1.0 -v videotestsrc ! dfbvideosink hue=20000 saturation=40000 brightness=25000 * ]| test the colorbalance interface implementation in dfbvideosink - * </refsect2> */ #ifdef HAVE_CONFIG_H @@ -810,7 +796,7 @@ gst_dfbvideosink_setup (GstDfbVideoSink * dfbvideosink) dfbvideosink->backbuffer = FALSE; dfbvideosink->pixel_format = DSPF_UNKNOWN; - /* If we do it all by ourself we create the DirectFB context, get the + /* If we do it all by ourself we create the DirectFB context, get the primary layer and use a fullscreen configuration */ if (!dfbvideosink->ext_surface) { GST_DEBUG_OBJECT (dfbvideosink, "no external surface, taking over " diff --git a/ext/dts/gstdtsdec.c b/ext/dts/gstdtsdec.c index 1c91ce195..2f786f738 100644 --- a/ext/dts/gstdtsdec.c +++ b/ext/dts/gstdtsdec.c @@ -20,18 +20,18 @@ /** * SECTION:element-dtsdec + * @title: dtsdec * * Digital Theatre System (DTS) audio decoder - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 dvdreadsrc title=1 ! mpegpsdemux ! dtsdec ! audioresample ! audioconvert ! alsasink * ]| Play a DTS audio track from a dvd. * |[ * gst-launch-1.0 filesrc location=abc.dts ! dtsdec ! audioresample ! audioconvert ! alsasink * ]| Decode a standalone file and play it. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/faac/gstfaac.c b/ext/faac/gstfaac.c index 7e3cf4148..8b861edce 100644 --- a/ext/faac/gstfaac.c +++ b/ext/faac/gstfaac.c @@ -20,16 +20,16 @@ /** * SECTION:element-faac + * @title: faac * @see_also: faad * * faac encodes raw audio to AAC (MPEG-4 part 3) streams. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc wave=sine num-buffers=100 ! audioconvert ! faac ! matroskamux ! filesink location=sine.mkv * ]| Encode a sine beep as aac and write to matroska container. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/faad/gstfaad.c b/ext/faad/gstfaad.c index 09d927c82..0c6122fdb 100644 --- a/ext/faad/gstfaad.c +++ b/ext/faad/gstfaad.c @@ -20,19 +20,19 @@ /** * SECTION:element-faad + * @title: faad * @seealso: faac * * faad decodes AAC (MPEG-4 part 3) stream. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=example.mp4 ! qtdemux ! faad ! audioconvert ! audioresample ! autoaudiosink * ]| Play aac from mp4 file. * |[ * gst-launch-1.0 filesrc location=example.adts ! faad ! audioconvert ! audioresample ! autoaudiosink * ]| Play standalone aac bitstream. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/fluidsynth/gstfluiddec.c b/ext/fluidsynth/gstfluiddec.c index 62262ecd7..a98b5f90b 100644 --- a/ext/fluidsynth/gstfluiddec.c +++ b/ext/fluidsynth/gstfluiddec.c @@ -21,19 +21,19 @@ /** * SECTION:element-fluiddec + * @title: fluiddec * @see_also: timidity, wildmidi * * This element renders midi-events as audio streams using * <ulink url="http://fluidsynth.sourceforge.net//">Fluidsynth</ulink>. * It offers better sound quality compared to the timidity or wildmidi element. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 filesrc location=song.mid ! midiparse ! fluiddec ! 
pulsesink * ]| This example pipeline will parse the midi and render to raw audio which is * played via pulseaudio. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglbumper.c b/ext/gl/gstglbumper.c index 9fdb4a079..8dd812cb8 100644 --- a/ext/gl/gstglbumper.c +++ b/ext/gl/gstglbumper.c @@ -21,16 +21,16 @@ /** * SECTION:element-glbumper + * @title: glbumper * * Bump mapping using the normal method. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! glupload ! glbumper location=normalmap.bmp ! glimagesink * ]| A pipeline to test normal mapping. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglcolorbalance.c b/ext/gl/gstglcolorbalance.c index 33b5a50a3..c2482a3d6 100644 --- a/ext/gl/gstglcolorbalance.c +++ b/ext/gl/gstglcolorbalance.c @@ -22,16 +22,16 @@ /** * SECTION:element-glcolorbalance + * @title: glcolorbalance * * Adjusts brightness, contrast, hue, saturation on a video stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! glupload ! glcolorbalance saturation=0.0 ! glcolorconvert ! gldownload ! ximagesink * ]| This pipeline converts the image to black and white by setting the * saturation to 0.0. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglcolorscale.c b/ext/gl/gstglcolorscale.c index 2ca5480e9..b7b0709dc 100644 --- a/ext/gl/gstglcolorscale.c +++ b/ext/gl/gstglcolorscale.c @@ -20,17 +20,15 @@ /** * SECTION:element-glcolorscale + * @title: glcolorscale * * video frame scaling and colorspace conversion. * - * <refsect2> - * <title>Scaling and Color space conversion</title> - * <para> + * ## Scaling and Color space conversion + * * Equivalent to glupload ! gldownload. - * </para> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! 
video/x-raw ! glcolorscale ! ximagesink * ]| A pipeline to test colorspace conversion. @@ -40,7 +38,7 @@ * video/x-raw, width=320, height=240, format=YV12 ! videoconvert ! autovideosink * ]| A pipeline to test hardware scaling and colorspace conversion. * FBO and GLSL are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgldeinterlace.c b/ext/gl/gstgldeinterlace.c index 568f26702..dbc2c24d4 100644 --- a/ext/gl/gstgldeinterlace.c +++ b/ext/gl/gstgldeinterlace.c @@ -20,16 +20,16 @@ /** * SECTION:element-deinterlace + * @title: deinterlace * * Deinterlacing using based on fragment shaders. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! gldeinterlace ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgldifferencematte.c b/ext/gl/gstgldifferencematte.c index b50e69bf7..aa1092a17 100644 --- a/ext/gl/gstgldifferencematte.c +++ b/ext/gl/gstgldifferencematte.c @@ -20,16 +20,16 @@ /** * SECTION:element-gldifferencematte. + * @title: gldifferencematte. * * Saves a background frame and replace it with a pixbuf. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! gldifferencemate location=backgroundimagefile ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgleffects.c b/ext/gl/gstgleffects.c index 2af3368af..d8a7da107 100644 --- a/ext/gl/gstgleffects.c +++ b/ext/gl/gstgleffects.c @@ -20,16 +20,16 @@ /** * SECTION:element-gleffects. + * @title: gleffects. * * GL Shading Language effects. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! gleffects effect=5 ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfilterapp.c b/ext/gl/gstglfilterapp.c index 925373b89..fc2e6ac5e 100644 --- a/ext/gl/gstglfilterapp.c +++ b/ext/gl/gstglfilterapp.c @@ -20,20 +20,18 @@ /** * SECTION:element-glfilterapp + * @title: glfilterapp * * The resize and redraw callbacks can be set from a client code. * - * <refsect2> - * <title>CLient callbacks</title> - * <para> - * The graphic scene can be written from a client code through the + * ## CLient callbacks + * + * The graphic scene can be written from a client code through the * two glfilterapp properties. - * </para> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Examples * see gst-plugins-gl/tests/examples/generic/recordgraphic - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfiltercube.c b/ext/gl/gstglfiltercube.c index 735d45e5e..427782afd 100644 --- a/ext/gl/gstglfiltercube.c +++ b/ext/gl/gstglfiltercube.c @@ -20,11 +20,11 @@ /** * SECTION:element-glfiltercube + * @title: glfiltercube * * The resize and redraw callbacks can be set from a client code. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! glfiltercube ! glimagesink * ]| A pipeline to mpa textures on the 6 cube faces.. @@ -37,7 +37,7 @@ * gst-launch-1.0 -v videotestsrc ! video/x-raw, width=640, height=480 ! glfiltercube ! glimagesink * ]| Resize scene before drawing the cube. * The scene size is greater than the input video size. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfilterglass.c b/ext/gl/gstglfilterglass.c index 254199dd5..ea20239a4 100644 --- a/ext/gl/gstglfilterglass.c +++ b/ext/gl/gstglfilterglass.c @@ -21,11 +21,11 @@ /** * SECTION:element-glfilterglass + * @title: glfilterglass * * Map textures on moving glass. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! glfilterglass ! 
glimagesink * ]| A pipeline inspired from http://www.mdk.org.pl/2007/11/17/gl-colorspace-conversions @@ -33,7 +33,7 @@ * |[ * gst-launch-1.0 -v videotestsrc ! glfilterglass ! video/x-raw, width=640, height=480 ! glimagesink * ]| The scene is greater than the input size. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfilterreflectedscreen.c b/ext/gl/gstglfilterreflectedscreen.c index fb1533890..62ccd9bf7 100644 --- a/ext/gl/gstglfilterreflectedscreen.c +++ b/ext/gl/gstglfilterreflectedscreen.c @@ -20,15 +20,15 @@ /** * SECTION:element-glfilterreflectedscreen + * @title: glfilterreflectedscreen * * Map Video Texture upon a screen, on a reflecting surface * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glfilterreflectedscreen ! glimagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfiltershader.c b/ext/gl/gstglfiltershader.c index f3434690b..8f5a990cf 100644 --- a/ext/gl/gstglfiltershader.c +++ b/ext/gl/gstglfiltershader.c @@ -21,11 +21,11 @@ /** * SECTION:element-glshader + * @title: glshader * * OpenGL fragment shader filter * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glshader fragment="\"`cat myshader.frag`\"" ! glimagesink * ]| @@ -45,13 +45,12 @@ * uniform float time; * uniform float width; * uniform float height; - * + * * void main () { * gl_FragColor = texture2D( tex, v_texcoord ); * } * ]| * - * </refsect2> */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/ext/gl/gstglimagesink.c b/ext/gl/gstglimagesink.c index 1c8d314f2..f6a61ac92 100644 --- a/ext/gl/gstglimagesink.c +++ b/ext/gl/gstglimagesink.c @@ -23,6 +23,7 @@ /** * SECTION:element-glimagesink + * @title: glimagesink * * glimagesink renders video frames to a drawable on a local or remote * display using OpenGL. 
This element can receive a Window ID from the @@ -34,28 +35,23 @@ * See the #GstGLDisplay documentation for a list of environment variables that * can override window/platform detection. * - * <refsect2> - * <title>Scaling</title> - * <para> + * ## Scaling + * * Depends on the driver, OpenGL handles hardware accelerated * scaling of video frames. This means that the element will just accept * incoming video frames no matter their geometry and will then put them to the * drawable scaling them on the fly. Using the #GstGLImageSink:force-aspect-ratio * property it is possible to enforce scaling with a constant aspect ratio, * which means drawing black borders around the video frame. - * </para> - * </refsect2> - * <refsect2> - * <title>Events</title> - * <para> + * + * ## Events + * * Through the gl thread, glimagesink handle some events coming from the drawable * to manage its appearance even when the data is not flowing (GST_STATE_PAUSED). * That means that even when the element is paused, it will receive expose events * from the drawable and draw the latest frame with correct borders/aspect-ratio. - * </para> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! video/x-raw ! glimagesink * ]| A pipeline to test hardware scaling. @@ -80,7 +76,7 @@ * ]| The graphic FPS scene can be greater than the input video FPS. * The graphic scene can be written from a client code through the * two glfilterapp properties. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglmosaic.c b/ext/gl/gstglmosaic.c index fcd4926a7..77aec599c 100644 --- a/ext/gl/gstglmosaic.c +++ b/ext/gl/gstglmosaic.c @@ -20,14 +20,14 @@ /** * SECTION:element-glmosaic + * @title: glmosaic * * glmixer sub element. N gl sink pads to 1 source pad. * N + 1 OpenGL contexts shared together. 
* N <= 6 because the rendering is more a like a cube than a mosaic * Each opengl input stream is rendered on a cube face * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! video/x-raw, format=YUY2 ! queue ! glmosaic name=m ! glimagesink \ * videotestsrc pattern=12 ! video/x-raw, format=I420, framerate=5/1, width=100, height=200 ! queue ! m. \ @@ -37,7 +37,7 @@ * videotestsrc ! gleffects effect=6 ! queue ! m. * ]| * FBO (Frame Buffer Object) is required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgloverlay.c b/ext/gl/gstgloverlay.c index 8c21bac77..173d11820 100644 --- a/ext/gl/gstgloverlay.c +++ b/ext/gl/gstgloverlay.c @@ -20,16 +20,16 @@ /** * SECTION:element-gloverlay + * @title: gloverlay * * Overlay GL video texture with a PNG image * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! gloverlay location=image.jpg ! glimagesink * ]| * FBO (Frame Buffer Object) is required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglstereomix.c b/ext/gl/gstglstereomix.c index 18e7111a7..78a2934fa 100644 --- a/ext/gl/gstglstereomix.c +++ b/ext/gl/gstglstereomix.c @@ -23,6 +23,7 @@ /** * SECTION:element-glstereomix + * @title: glstereomix * * Combine 2 input streams to produce a stereoscopic output * stream. Input views are taken from the left pad and right pad @@ -34,8 +35,7 @@ * The multiview representation on the output is chosen according to * the downstream caps. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc pattern=ball name=left \ * videotestsrc name=right glstereomix name=mix \ @@ -52,10 +52,10 @@ * right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \ * mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=top-bottom ! \ * glcolorconvert ! gldownload ! queue ! x264enc ! h264parse ! \ - * mp4mux ! progressreport ! filesink location=output.mp4 + * mp4mux ! progressreport ! 
filesink location=output.mp4 * ]| Mix the input from a camera to the left view, and videotestsrc to the right view, * and encode as a top-bottom frame packed H.264 video. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/ext/gl/gstglstereosplit.c b/ext/gl/gstglstereosplit.c index 5650ab4d6..933f3c593 100644 --- a/ext/gl/gstglstereosplit.c +++ b/ext/gl/gstglstereosplit.c @@ -20,16 +20,16 @@ /** * SECTION:element-glstereosplit + * @title: glstereosplit * * Receive a stereoscopic video stream and split into left/right * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgltestsrc.c b/ext/gl/gstgltestsrc.c index 83218b4ac..e9c7df85a 100644 --- a/ext/gl/gstgltestsrc.c +++ b/ext/gl/gstgltestsrc.c @@ -23,21 +23,19 @@ /** * SECTION:element-gltestsrc + * @title: gltestsrc * - * <refsect2> - * <para> * The gltestsrc element is used to produce test video texture. * The video test produced can be controlled with the "pattern" * property. - * </para> - * <title>Example launch line</title> - * <para> - * <programlisting> + * + * ## Example launch line + * + * |[ * gst-launch-1.0 -v gltestsrc pattern=smpte ! glimagesink - * </programlisting> + * ]| * Shows original SMPTE color bars in a window. - * </para> - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgltransformation.c b/ext/gl/gstgltransformation.c index f6c9e3f26..729a9f653 100644 --- a/ext/gl/gstgltransformation.c +++ b/ext/gl/gstgltransformation.c @@ -21,11 +21,11 @@ /** * SECTION:element-gltransformation + * @title: gltransformation * * Transforms video on the GPU. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 gltestsrc ! gltransformation rotation-z=45 ! 
glimagesink * ]| A pipeline to rotate by 45 degrees @@ -38,7 +38,7 @@ * |[ * gst-launch-1.0 gltestsrc ! gltransformation rotation-x=-45 ortho=True ! glimagesink * ]| Rotate the video around the X-Axis by -45° with an orthographic projection - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglvideoflip.c b/ext/gl/gstglvideoflip.c index 5d2dac5ee..d5bb2ac57 100644 --- a/ext/gl/gstglvideoflip.c +++ b/ext/gl/gstglvideoflip.c @@ -20,15 +20,15 @@ /** * SECTION:element-glvideo_flip + * @title: glvideo_flip * * Transforms video on the GPU. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glvideoflip method=clockwise ! glimagesinkelement * ]| This pipeline flips the test image 90 degrees clockwise. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglvideomixer.c b/ext/gl/gstglvideomixer.c index ebd682752..5df126d87 100644 --- a/ext/gl/gstglvideomixer.c +++ b/ext/gl/gstglvideomixer.c @@ -20,13 +20,13 @@ /** * SECTION:element-glvideomixer + * @title: glvideomixer * * Composites a number of streams into a single output scene using OpenGL in * a similar fashion to compositor and videomixer. See the compositor plugin * for documentation about the #GstGLVideoMixerPad properties. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 glvideomixer name=m ! glimagesink \ * videotestsrc ! video/x-raw, format=YUY2 ! glupload ! glcolorconvert ! m. \ @@ -36,7 +36,7 @@ * videotestsrc ! glupload ! glfiltercube ! queue ! m. \ * videotestsrc ! glupload ! gleffects effect=6 ! queue ! m.gst-launch-1.0 glvideomixer name=m ! 
glimagesink \ * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglviewconvert.c b/ext/gl/gstglviewconvert.c index 617df449d..670305c81 100644 --- a/ext/gl/gstglviewconvert.c +++ b/ext/gl/gstglviewconvert.c @@ -22,14 +22,14 @@ /** * SECTION:element-glviewconvert + * @title: glviewconvert * * Convert stereoscopic video between different representations using fragment shaders. * * The element can use either property settings or caps negotiation to choose the * input and output formats to process. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glviewconvert ! glimagesink * ]| Simple placebo example demonstrating identity passthrough of mono video @@ -39,7 +39,7 @@ * ]| Force re-interpretation of the input checkers pattern as a side-by-side stereoscopic * image and display in glimagesink. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstopengl.c b/ext/gl/gstopengl.c index 4a8c4d6a1..75f8d3332 100644 --- a/ext/gl/gstopengl.c +++ b/ext/gl/gstopengl.c @@ -23,20 +23,20 @@ /** * SECTION:plugin-opengl + * @title: GstOpengl * * Cross-platform OpenGL plugin. - * <refsect2> - * <title>Debugging</title> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Debugging + * + * ## Examples * |[ * gst-launch-1.0 --gst-debug=gldisplay:3 videotestsrc ! glimagesink * ]| A debugging pipeline. |[ * GST_DEBUG=gl*:6 gst-launch-1.0 videotestsrc ! glimagesink * ]| A debugging pipelines related to shaders. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gtk/gstgtkbasesink.c b/ext/gtk/gstgtkbasesink.c index 44c8cbd46..843c97f1b 100644 --- a/ext/gtk/gstgtkbasesink.c +++ b/ext/gtk/gstgtkbasesink.c @@ -20,6 +20,7 @@ /** * SECTION:gtkgstsink + * @title: GstGtkBaseSink * */ diff --git a/ext/gtk/gstgtkglsink.c b/ext/gtk/gstgtkglsink.c index 2bdd331c6..56326882a 100644 --- a/ext/gtk/gstgtkglsink.c +++ b/ext/gtk/gstgtkglsink.c @@ -19,8 +19,8 @@ */ /** - * SECTION:gstgtkglsink - * + * SECTION:element-gtkglsink + * @title: gtkglsink */ #ifdef HAVE_CONFIG_H diff --git a/ext/gtk/gstgtksink.c b/ext/gtk/gstgtksink.c index e9f9d0cc7..ba8ea33ca 100644 --- a/ext/gtk/gstgtksink.c +++ b/ext/gtk/gstgtksink.c @@ -19,7 +19,8 @@ */ /** - * SECTION:gtkgstsink + * SECTION:element-gtkgstsink + * @title: gtkgstsink * */ diff --git a/ext/gtk/gtkgstglwidget.c b/ext/gtk/gtkgstglwidget.c index e780ebd38..ea0fe2f8a 100644 --- a/ext/gtk/gtkgstglwidget.c +++ b/ext/gtk/gtkgstglwidget.c @@ -41,6 +41,7 @@ /** * SECTION:gtkgstglwidget + * @title: GtkGstGlWidget * @short_description: a #GtkGLArea that renders GStreamer video #GstBuffers * @see_also: #GtkGLArea, #GstBuffer * diff --git a/ext/gtk/gtkgstwidget.c b/ext/gtk/gtkgstwidget.c index 5fe238a54..a936210ba 100644 --- a/ext/gtk/gtkgstwidget.c +++ b/ext/gtk/gtkgstwidget.c @@ -29,6 +29,7 @@ /** * SECTION:gtkgstwidget + * @title: GtkGstWidget * @short_description: a #GtkWidget that renders GStreamer video #GstBuffers * @see_also: #GtkDrawingArea, #GstBuffer * diff --git a/ext/hls/gsthlsdemux.c b/ext/hls/gsthlsdemux.c index b75e600ca..ea49fd0ee 100644 --- a/ext/hls/gsthlsdemux.c +++ b/ext/hls/gsthlsdemux.c @@ -26,15 +26,15 @@ */ /** * SECTION:element-hlsdemux + * @title: hlsdemux * * HTTP Live Streaming demuxer element. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 souphttpsrc location=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8 ! hlsdemux ! decodebin ! 
videoconvert ! videoscale ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/hls/gsthlssink.c b/ext/hls/gsthlssink.c index b56ef6c8f..9ae6ca6d3 100644 --- a/ext/hls/gsthlssink.c +++ b/ext/hls/gsthlssink.c @@ -19,15 +19,15 @@ /** * SECTION:element-hlssink + * @title: hlssink * * HTTP Live Streaming sink/server * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc is-live=true ! x264enc ! mpegtsmux ! hlssink max-files=5 * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/ext/iqa/iqa.c b/ext/iqa/iqa.c index de09e81f2..b91275f6f 100644 --- a/ext/iqa/iqa.c +++ b/ext/iqa/iqa.c @@ -19,6 +19,7 @@ /** * SECTION:element-iqa + * @title: iqa * @short_description: Image Quality Assessment plugin. * * IQA will perform full reference image quality assessment, with the @@ -48,13 +49,12 @@ * sink_2\=\(double\)0.0082939683976297474\;", * time=(guint64)0; * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m uridecodebin uri=file:///test/file/1 ! iqa name=iqa do-dssim=true \ * ! videoconvert ! autovideosink uridecodebin uri=file:///test/file/2 ! iqa. * ]| This pipeline will output messages to the console for each set of compared frames. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/kate/gstkatedec.c b/ext/kate/gstkatedec.c index 88a56660d..65b29bfdb 100644 --- a/ext/kate/gstkatedec.c +++ b/ext/kate/gstkatedec.c @@ -45,33 +45,29 @@ /** * SECTION:element-katedec + * @title: katedec * @see_also: oggdemux * - * <refsect2> - * <para> * This element decodes Kate streams * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * for text based data, such as subtitles. Any number of kate streams can be * embedded in an Ogg stream. - * </para> - * <para> + * * libkate (see above url) is needed to build this plugin. 
- * </para> - * <title>Example pipeline</title> - * <para> + * + * ## Example pipeline + * * This explicitely decodes a Kate stream: - * <programlisting> + * |[ * gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE - * </programlisting> - * </para> - * <para> + * ]| + * * This will automatically detect and use any Kate streams multiplexed * in an Ogg stream: - * <programlisting> + * |[ * gst-launch-1.0 playbin uri=file:///tmp/test.ogg - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/kate/gstkateenc.c b/ext/kate/gstkateenc.c index 12bd1c842..5b109c988 100644 --- a/ext/kate/gstkateenc.c +++ b/ext/kate/gstkateenc.c @@ -46,26 +46,23 @@ /** * SECTION:element-kateenc + * @title: kateenc * @see_also: oggmux * - * <refsect2> - * <para> * This element encodes Kate streams * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * for text based data, such as subtitles. Any number of kate streams can be * embedded in an Ogg stream. - * </para> - * <para> + * * libkate (see above url) is needed to build this plugin. - * </para> - * <title>Example pipeline</title> - * <para> + * + * ## Example pipeline + * * This encodes a DVD SPU track to a Kate stream: - * <programlisting> + * |[ * gst-launch-1.0 dvdreadsrc ! dvddemux ! dvdsubparse ! kateenc category=spu-subtitles ! oggmux ! filesink location=test.ogg - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ /* FIXME: diff --git a/ext/kate/gstkateparse.c b/ext/kate/gstkateparse.c index 0cb9db0f3..83e3270d9 100644 --- a/ext/kate/gstkateparse.c +++ b/ext/kate/gstkateparse.c @@ -21,40 +21,35 @@ /** * SECTION:element-kateparse + * @title: kateparse * @short_description: parses kate streams * @see_also: katedec, vorbisparse, oggdemux, theoraparse * - * <refsect2> - * <para> * The kateparse element will parse the header packets of the Kate * stream and put them as the streamheader in the caps. 
This is used in the * multifdsink case where you want to stream live kate streams to multiple * clients, each client has to receive the streamheaders first before they can * consume the kate packets. - * </para> - * <para> + * * This element also makes sure that the buffers that it pushes out are properly * timestamped and that their offset and offset_end are set. The buffers that * kateparse outputs have all of the metadata that oggmux expects to receive, * which allows you to (for example) remux an ogg/kate file. - * </para> - * <title>Example pipelines</title> - * <para> - * <programlisting> + * + * ## Example pipelines + * + * |[ * gst-launch-1.0 -v filesrc location=kate.ogg ! oggdemux ! kateparse ! fakesink - * </programlisting> + * ]| * This pipeline shows that the streamheader is set in the caps, and that each * buffer has the timestamp, duration, offset, and offset_end set. - * </para> - * <para> - * <programlisting> + * + * |[ * gst-launch-1.0 filesrc location=kate.ogg ! oggdemux ! kateparse \ * ! oggmux ! filesink location=kate-remuxed.ogg - * </programlisting> + * ]| * This pipeline shows remuxing. kate-remuxed.ogg might not be exactly the same * as kate.ogg, but they should produce exactly the same decoded data. - * </para> - * </refsect2> * */ diff --git a/ext/kate/gstkatetag.c b/ext/kate/gstkatetag.c index e280917bc..4b2ec5706 100644 --- a/ext/kate/gstkatetag.c +++ b/ext/kate/gstkatetag.c @@ -21,46 +21,41 @@ /** * SECTION:element-katetag + * @title: katetag * @see_also: #oggdemux, #oggmux, #kateparse, #GstTagSetter * @short_description: retags kate streams * - * <refsect2> - * <para> * The katetag element can change the tag contained within a raw * kate stream. Specifically, it modifies the comments header packet * of the kate stream, as well as the language and category of the * kate stream. 
- * </para> - * <para> + * * The element will also process the stream as the #kateparse element does * so it can be used when remuxing an Ogg Kate stream, without additional * elements. - * </para> - * <para> + * * Applications can set the tags to write using the #GstTagSetter interface. * Tags contained within the kate stream will be picked up * automatically (and merged according to the merge mode set via the tag * setter interface). - * </para> - * <title>Example pipelines</title> - * <para> + * + * ## Example pipelines + * * This element is only useful with gst-launch-1.0 for modifying the language * and/or category (which are properties of the stream located in the kate * beginning of stream header), because it does not support setting the tags * on a #GstTagSetter interface. Conceptually, the element will usually be * used like: - * <programlisting> + * |[ * gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag ! oggmux ! filesink location=bar.ogg - * </programlisting> - * </para> - * <para> + * ]| + * * This pipeline will set the language and category of the stream to the * given values: - * <programlisting> + * |[ * gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag language=pt_BR category=subtitles ! oggmux ! filesink location=bar.ogg - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/kate/gstkatetiger.c b/ext/kate/gstkatetiger.c index e4f3ca934..21970c9f6 100644 --- a/ext/kate/gstkatetiger.c +++ b/ext/kate/gstkatetiger.c @@ -45,32 +45,29 @@ /** * SECTION:element-tiger + * @title: tiger * @see_also: katedec * - * <refsect2> - * <para> * This element decodes and renders Kate streams * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * for text based data, such as subtitles. Any number of kate streams can be * embedded in an Ogg stream. 
- * </para> - * <para> + * * libkate (see above url) and <ulink url="http://libtiger.googlecode.com/">libtiger</ulink> * are needed to build this element. - * </para> - * <title>Example pipeline</title> - * <para> + * + * ## Example pipeline + * * This pipeline renders a Kate stream on top of a Theora video multiplexed * in the same stream: - * <programlisting> + * |[ * gst-launch-1.0 \ * filesrc location=video.ogg ! oggdemux name=demux \ * demux. ! queue ! theoradec ! videoconvert ! tiger name=tiger \ * demux. ! queue ! kateparse ! tiger. \ * tiger. ! videoconvert ! autovideosink - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/ladspa/gstladspa.c b/ext/ladspa/gstladspa.c index 624cbd81a..8fea1096a 100644 --- a/ext/ladspa/gstladspa.c +++ b/ext/ladspa/gstladspa.c @@ -22,6 +22,7 @@ /** * SECTION:element-ladspa + * @title: ladspa * @short_description: bridge for LADSPA (Linux Audio Developer's Simple Plugin API) * @see_also: #GstAudioConvert #GstAudioResample, #GstAudioTestSrc, #GstAutoAudioSink * @@ -32,8 +33,7 @@ * element classification. The functionality you get depends on the LADSPA plugins * you have installed. * - * <refsect2> - * <title>Example LADSPA line without this plugins</title> + * ## Example LADSPA line without this plugins * |[ * (padsp) listplugins * (padsp) analyseplugin cmt.so amp_mono @@ -41,16 +41,13 @@ * (padsp) applyplugin testin.wav testout.wav cmt.so amp_mono 2 * gst-launch-1.0 playbin uri=file://"$PWD"/testout.wav * ]| Decode any audio file into wav with the format expected for the specific ladspa plugin to be applied, apply the ladspa filter and play it. - * </refsect2> * * Now with this plugin: * - * <refsect2> - * <title>Example LADSPA line with this plugins</title> + * ## Example LADSPA line with this plugins * |[ * gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! 
queue ! autoaudiosink myT. ! queue ! audioconvert ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4 * ]| Get audio input, filter it through CAPS Plate and TAP Stereo Echo, play it and show a visualization (recommended hearphones). - * </refsect2> * * In case you wonder the plugin naming scheme, quoting ladspa.h: * "Plugin types should be identified by file and label rather than by @@ -61,60 +58,52 @@ * on top of the audio in and out one, so some parameters are readable too. * * You can see the listing of plugins available with: - * <refsect2> - * <title>Inspecting the plugins list</title> + * + * ## Inspecting the plugins list * |[ * gst-inspect ladspa * ]| List available LADSPA plugins on gstreamer. - * </refsect2> * * You can see the parameters of any plugin with: - * <refsect2> - * <title>Inspecting the plugins</title> + * + * ## Inspecting the plugins * |[ * gst-inspect ladspa-retro-flange-1208-so-retroflange * ]| List details of the plugin, parameters, range and defaults included. - * </refsect2> * * The elements categorize in: - * <itemizedlist> - * <listitem><para>Filter/Effect/Audio/LADSPA:</para> - * <refsect2> - * <title>Example Filter/Effect/Audio/LADSPA line with this plugins</title> + * + * * Filter/Effect/Audio/LADSPA: + * + * ## Example Filter/Effect/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 filesrc location="$myfile" ! decodebin ! audioconvert ! audioresample ! ladspa-calf-so-reverb decay-time=15 high-frq-damp=20000 room-size=5 diffusion=1 wet-amount=2 dry-amount=2 pre-delay=50 bass-cut=20000 treble-cut=20000 ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! autoaudiosink * ]| Decode any audio file, filter it through Calf Reverb LADSPA then TAP Stereo Echo, and play it. 
- * </refsect2> - * </listitem> - * <listitem><para>Source/Audio/LADSPA:</para> - * <refsect2> - * <title>Example Source/Audio/LADSPA line with this plugins</title> + * + * * Source/Audio/LADSPA: + * + * ## Example Source/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 ladspasrc-sine-so-sine-fcac frequency=220 amplitude=100 ! audioconvert ! autoaudiosink * ]| Generate a sine wave with Sine Oscillator (Freq:control, Amp:control) and play it. - * </refsect2> - * <refsect2> - * <title>Example Source/Audio/LADSPA line with this plugins</title> + * + * ## Example Source/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 ladspasrc-caps-so-click bpm=240 volume=1 ! autoaudiosink * ]| Generate clicks with CAPS Click - Metronome at 240 beats per minute and play it. - * </refsect2> - * <refsect2> - * <title>Example Source/Audio/LADSPA line with this plugins</title> + * + * ## Example Source/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 ladspasrc-random-1661-so-random-fcsc-oa ! ladspa-cmt-so-amp-mono gain=1.5 ! ladspa-caps-so-plate ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! wavescope ! videoconvert ! autovideosink * ]| Generate random wave, filter it trhough Mono Amplifier and Versatile Plate Reverb, and play, while showing, it. - * </refsect2> - * </listitem> - * <listitem><para>Sink/Audio/LADSPA:</para> - * <refsect2> - * <title>Example Sink/Audio/LADSPA line with this plugins</title> + * + * * Sink/Audio/LADSPA: + * + * ## Example Sink/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! 
xvimagesink pixel-aspect-ratio=3/4 * ]| Get audio input, filter it trhough Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitily anulate audio with Null (Audio Output), and play a visualization (recommended hearphones). - * </refsect2> - * </listitem> - * </itemizedlist> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/libde265/libde265-dec.c b/ext/libde265/libde265-dec.c index ab376c505..109edadba 100644 --- a/ext/libde265/libde265-dec.c +++ b/ext/libde265/libde265-dec.c @@ -21,15 +21,15 @@ /** * SECTION:element-libde265dec + * @title: libde265dec * * Decodes HEVC/H.265 video. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=bitstream.hevc ! 'video/x-hevc,stream-format=byte-stream,framerate=25/1' ! libde265dec ! autovideosink * ]| The above pipeline decodes the HEVC/H.265 bitstream and renders it to the screen. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/lv2/gstlv2.c b/ext/lv2/gstlv2.c index acad44355..2c8253dc8 100644 --- a/ext/lv2/gstlv2.c +++ b/ext/lv2/gstlv2.c @@ -22,6 +22,7 @@ /** * SECTION:element-lv2 + * @title: lv2 * @short_description: bridge for LV2. * * LV2 is a standard for plugins and matching host applications, diff --git a/ext/openal/gstopenalsink.c b/ext/openal/gstopenalsink.c index 66de05113..eb9b9bace 100644 --- a/ext/openal/gstopenalsink.c +++ b/ext/openal/gstopenalsink.c @@ -24,6 +24,7 @@ /** * SECTION:element-openalsink + * @title: openalsink * @see_also: openalsrc * @short_description: capture raw audio samples through OpenAL * @@ -31,8 +32,7 @@ * * Unfortunately the capture API doesn't have a format enumeration/check. all you can do is try opening it and see if it works. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 audiotestsrc ! audioconvert ! volume volume=0.5 ! openalsink * ]| will play a sine wave (continuous beep sound) through OpenAL. 
@@ -42,7 +42,7 @@ * |[ * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink * ]| will capture and play audio through OpenAL. - * </refsect2> + * */ /* diff --git a/ext/openal/gstopenalsrc.c b/ext/openal/gstopenalsrc.c index 26e73d49b..4f1e26e77 100644 --- a/ext/openal/gstopenalsrc.c +++ b/ext/openal/gstopenalsrc.c @@ -49,20 +49,20 @@ /** * SECTION:element-openalsrc + * @title: openalsrc * @see_also: openalsink * @short_description: capture raw audio samples through OpenAL * * This element captures raw audio samples through OpenAL. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v openalsrc ! audioconvert ! wavenc ! filesink location=stream.wav * ]| * will capture sound through OpenAL and encode it to a wav file. * |[ * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink * ]| will capture and play audio through OpenAL. - * </refsect2> + * */ /* diff --git a/ext/opus/gstopusparse.c b/ext/opus/gstopusparse.c index 56e8bb838..4408af6d3 100644 --- a/ext/opus/gstopusparse.c +++ b/ext/opus/gstopusparse.c @@ -22,16 +22,16 @@ /** * SECTION:element-opusparse + * @title: opusparse * @see_also: opusenc, opusdec * * This element parses OPUS packets. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=opusdata ! opusparse ! opusdec ! audioconvert ! audioresample ! alsasink * ]| Decode and plays an unmuxed Opus file. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/resindvd/rsndec.c b/ext/resindvd/rsndec.c index 02d3eb645..4f4b2680f 100644 --- a/ext/resindvd/rsndec.c +++ b/ext/resindvd/rsndec.c @@ -369,7 +369,7 @@ rsn_dec_get_type (void) return type; } -/** Audio decoder subclass */ +/* Audio decoder subclass */ static GstStaticPadTemplate audio_sink_template = GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, @@ -422,7 +422,7 @@ rsn_audiodec_init (RsnAudioDec * self) { } -/** Video decoder subclass */ +/* Video decoder subclass */ static GstStaticPadTemplate video_sink_template = GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, diff --git a/ext/resindvd/rsninputselector.c b/ext/resindvd/rsninputselector.c index 3849baff4..678311603 100644 --- a/ext/resindvd/rsninputselector.c +++ b/ext/resindvd/rsninputselector.c @@ -25,6 +25,7 @@ /** * SECTION:element-input-selector + * @title: input-selector * @see_also: #GstOutputSelector * * Direct one out of N input streams to the output pad. @@ -32,21 +33,11 @@ * The input pads are from a GstPad subclass and have additional * properties, which users may find useful, namely: * - * <itemizedlist> - * <listitem> - * "running-time": Running time of stream on pad (#gint64) - * </listitem> - * <listitem> - * "tags": The currently active tags on the pad (#GstTagList, boxed type) - * </listitem> - * <listitem> - * "active": If the pad is currently active (#gboolean) - * </listitem> - * <listitem> - * "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of - * #GST_FLOW_NOT_LINKED - * </listitem> - * </itemizedlist> + * * "running-time": Running time of stream on pad (#gint64) + * * "tags": The currently active tags on the pad (#GstTagList, boxed type) + * * "active": If the pad is currently active (#gboolean) + * * "always-ok" : Make inactive pads return #GST_FLOW_OK instead of #GST_FLOW_NOT_LINKED + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rsvg/gstrsvgdec.c b/ext/rsvg/gstrsvgdec.c index 5d5fc8ef6..e50a8978d 100644 --- 
a/ext/rsvg/gstrsvgdec.c +++ b/ext/rsvg/gstrsvgdec.c @@ -19,15 +19,15 @@ */ /** * SECTION:element-rsvgdec + * @title: rsvgdec * * This elements renders SVG graphics. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=image.svg ! rsvgdec ! imagefreeze ! videoconvert ! autovideosink * ]| render and show a svg image. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rsvg/gstrsvgoverlay.c b/ext/rsvg/gstrsvgoverlay.c index b467dc875..5f661b9ad 100644 --- a/ext/rsvg/gstrsvgoverlay.c +++ b/ext/rsvg/gstrsvgoverlay.c @@ -21,6 +21,7 @@ /** * SECTION:element-rsvgoverlay + * @title: rsvgoverlay * * This elements overlays SVG graphics over the video. SVG data can * either be specified through properties, or fed through the @@ -44,8 +45,7 @@ * the values of the x/y/width/height attributes, by setting * height-/width-relative to 1.0. and all other attributes to 0. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay location=foo.svg ! videoconvert ! autovideosink * ]| specifies the SVG location through the filename property. @@ -55,7 +55,7 @@ * |[ * gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay data='<svg viewBox="0 0 800 600"><image x="80%" y="80%" width="10%" height="10%" xlink:href="foo.jpg" /></svg>' ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rtmp/gstrtmpsink.c b/ext/rtmp/gstrtmpsink.c index 1815d9c64..4e07cc692 100644 --- a/ext/rtmp/gstrtmpsink.c +++ b/ext/rtmp/gstrtmpsink.c @@ -20,6 +20,7 @@ /** * SECTION:element-rtmpsink + * @title: rtmpsink * * This element delivers data to a streaming server via RTMP. It uses * librtmp, and supports any protocols/urls that librtmp supports. @@ -27,12 +28,11 @@ * for librtmp, such as 'flashver=version'. 
See the librtmp documentation * for more detail * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1' * ]| Encode a test video stream to FLV video format and stream it via RTMP. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rtmp/gstrtmpsrc.c b/ext/rtmp/gstrtmpsrc.c index db620cc1f..69cec91a6 100644 --- a/ext/rtmp/gstrtmpsrc.c +++ b/ext/rtmp/gstrtmpsrc.c @@ -26,17 +26,17 @@ /** * SECTION:element-rtmpsrc + * @title: rtmpsrc * * This plugin reads data from a local or remote location specified * by an URI. This location can be specified using any protocol supported by * the RTMP library, i.e. rtmp, rtmpt, rtmps, rtmpe, rtmfp, rtmpte and rtmpts. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v rtmpsrc location=rtmp://somehost/someurl ! fakesink * ]| Open an RTMP location and pass its content to fakesink. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/sbc/gstsbcdec.c b/ext/sbc/gstsbcdec.c index 5019e8326..84b515709 100644 --- a/ext/sbc/gstsbcdec.c +++ b/ext/sbc/gstsbcdec.c @@ -21,15 +21,15 @@ /** * SECTION:element-sbdec + * @title: sbdec * * This element decodes a Bluetooth SBC audio streams to raw integer PCM audio * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=audio.sbc ! sbcparse ! sbcdec ! audioconvert ! audioresample ! autoaudiosink * ]| Decode a raw SBC file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/sbc/gstsbcenc.c b/ext/sbc/gstsbcenc.c index 71aca0753..c25cd32be 100644 --- a/ext/sbc/gstsbcenc.c +++ b/ext/sbc/gstsbcenc.c @@ -20,6 +20,7 @@ /** * SECTION:element-sbenc + * @title: sbenc * * This element encodes raw integer PCM audio into a Bluetooth SBC audio. 
* @@ -27,12 +28,11 @@ * allocation-mode can be set by adding a capsfilter element with appropriate * filtercaps after the sbcenc encoder element. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v audiotestsrc ! sbcenc ! rtpsbcpay ! udpsink * ]| Encode a sine wave into SBC, RTP payload it and send over the network using UDP - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/smoothstreaming/gstmssdemux.c b/ext/smoothstreaming/gstmssdemux.c index 120d9c22b..0fdea3b4d 100644 --- a/ext/smoothstreaming/gstmssdemux.c +++ b/ext/smoothstreaming/gstmssdemux.c @@ -22,10 +22,10 @@ /** * SECTION:element-mssdemux + * @title: mssdemux * * Demuxes a Microsoft's Smooth Streaming manifest into its audio and/or video streams. * - * */ /* diff --git a/ext/spandsp/gstdtmfdetect.c b/ext/spandsp/gstdtmfdetect.c index 362edb4f3..df7fc7ef1 100644 --- a/ext/spandsp/gstdtmfdetect.c +++ b/ext/spandsp/gstdtmfdetect.c @@ -24,36 +24,21 @@ /** * SECTION:element-dtmfdetect + * @title: dtmfdetect * @short_description: Detects DTMF tones * * This element will detect DTMF tones and emit messages. * - * The message is called <classname>"dtmf-event"</classname> and has - * the following fields: - * <itemizedlist> - * <listitem> - * <para> - * gint <classname>type</classname> (0-1): - * The application uses this field to specify which of the two methods + * The message is called `dtmf-event` and has the following fields: + * + * * gint `type` (0-1): The application uses this field to specify which of the two methods * specified in RFC 2833 to use. The value should be 0 for tones and 1 for * named events. Tones are specified by their frequencies and events are * specfied by their number. This element can only take events as input. * Do not confuse with "method" which specified the output. - * </para> - * </listitem> - * <listitem> - * <para> - * gint <classname>number</classname> (0-16): - * The event number. 
- * </para> - * </listitem> - * <listitem> - * <para> - * gint <classname>method</classname> (2): - * This field will always been 2 (ie sound) from this element. - * </para> - * </listitem> - * </itemizedlist> + * * gint `number` (0-16): The event number. + * * gint `method` (2): This field will always be 2 (ie sound) from this element. + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/spandsp/gstspanplc.c b/ext/spandsp/gstspanplc.c index 06e109046..f4ccf7689 100644 --- a/ext/spandsp/gstspanplc.c +++ b/ext/spandsp/gstspanplc.c @@ -20,6 +20,7 @@ /** * SECTION:element-spanplc + * @title: spanplc * * The spanplc (Packet Loss Concealment) element provides a synthetic * fill-in signal, to minimise the audible effect of lost packets in diff --git a/ext/srtp/gstsrtpdec.c b/ext/srtp/gstsrtpdec.c index 9967dd543..e1be70e0a 100644 --- a/ext/srtp/gstsrtpdec.c +++ b/ext/srtp/gstsrtpdec.c @@ -46,6 +46,7 @@ /** * SECTION:element-srtpdec + * @title: srtpdec * @see_also: srtpenc * * gstrtpdec acts as a decoder that removes security from SRTP and SRTCP @@ -95,8 +96,7 @@ * other means. If no rollover counter is provided by the user, 0 is * used by default. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 udpsrc port=5004 caps='application/x-srtp, payload=(int)8, ssrc=(uint)1356955624, srtp-key=(buffer)012345678901234567890123456789012345678901234567890123456789, srtp-cipher=(string)aes-128-icm, srtp-auth=(string)hmac-sha1-80, srtcp-cipher=(string)aes-128-icm, srtcp-auth=(string)hmac-sha1-80' ! srtpdec ! rtppcmadepay ! alawdec ! pulsesink * ]| Receive PCMA SRTP packets through UDP using caps to specify @@ -105,7 +105,7 @@ * gst-launch-1.0 audiotestsrc ! alawenc ! rtppcmapay ! 'application/x-rtp, payload=(int)8, ssrc=(uint)1356955624' ! srtpenc key="012345678901234567890123456789012345678901234567890123456789" ! 
udpsink port=5004 * ]| Send PCMA SRTP packets through UDP, nothing how the SSRC is forced so * that the receiver will recognize it. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/srtp/gstsrtpenc.c b/ext/srtp/gstsrtpenc.c index c9ae98070..ff17fd03f 100644 --- a/ext/srtp/gstsrtpenc.c +++ b/ext/srtp/gstsrtpenc.c @@ -45,7 +45,8 @@ */ /** - * SECTION:gst-plugin-bad-plugins-srtpenc + * SECTION:element-srtpenc + * @title: srtpenc * @see_also: srtpdec * * gstrtpenc acts as an encoder that adds security to RTP and RTCP diff --git a/ext/teletextdec/gstteletextdec.c b/ext/teletextdec/gstteletextdec.c index 7f8a8a690..d754e972c 100644 --- a/ext/teletextdec/gstteletextdec.c +++ b/ext/teletextdec/gstteletextdec.c @@ -21,16 +21,16 @@ /** * SECTION:element-teletextdec + * @title: teletextdec * * Decode a stream of raw VBI packets containing teletext information to a RGBA * stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v -m filesrc location=recording.mpeg ! tsdemux ! teletextdec ! videoconvert ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/ttml/gstttmlparse.c b/ext/ttml/gstttmlparse.c index 6de9a1f75..e6f21bee1 100644 --- a/ext/ttml/gstttmlparse.c +++ b/ext/ttml/gstttmlparse.c @@ -22,6 +22,7 @@ /** * SECTION:element-ttmlparse + * @title: ttmlparse * * Parses timed text subtitle files described using Timed Text Markup Language * (TTML). Currently, only the EBU-TT-D profile of TTML, designed for @@ -35,13 +36,12 @@ * elements. A downstream renderer element uses this information to correctly * render the text on top of video frames. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! 
ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink * ]| Parse and render TTML subtitles contained in a single XML file over an * MP4 stream containing H.264 video and AAC audio. - * </refsect2> + * */ #include <stdio.h> diff --git a/ext/ttml/gstttmlrender.c b/ext/ttml/gstttmlrender.c index 2648facb9..ee1cb974d 100644 --- a/ext/ttml/gstttmlrender.c +++ b/ext/ttml/gstttmlrender.c @@ -25,19 +25,19 @@ /** * SECTION:element-ttmlrender + * @title: ttmlrender * * Renders timed text on top of a video stream. It receives text in buffers * from a ttmlparse element; each text string is in its own #GstMemory within * the GstBuffer, and the styling and layout associated with each text string * is in metadata attached to the #GstBuffer. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink * ]| Parse and render TTML subtitles contained in a single XML file over an * MP4 stream containing H.264 video and AAC audio: - * </refsect2> + * */ #include <gst/video/video.h> diff --git a/ext/ttml/subtitle.c b/ext/ttml/subtitle.c index b8843a379..e0c32faa1 100644 --- a/ext/ttml/subtitle.c +++ b/ext/ttml/subtitle.c @@ -20,6 +20,7 @@ /** * SECTION:gstsubtitle + * @title: GstSubtitle * @short_description: Library for describing sets of static subtitles. 
* * This library enables the description of static text scenes made up of a diff --git a/ext/ttml/subtitlemeta.c b/ext/ttml/subtitlemeta.c index 69da5f58b..cdbfcce24 100644 --- a/ext/ttml/subtitlemeta.c +++ b/ext/ttml/subtitlemeta.c @@ -20,6 +20,7 @@ /** * SECTION:gstsubtitlemeta + * @title: GstSubtitleMeta * @short_description: Metadata class for timed-text subtitles. * * The GstSubtitleMeta class enables the layout and styling information needed diff --git a/ext/voaacenc/gstvoaacenc.c b/ext/voaacenc/gstvoaacenc.c index 9c48ea205..0580d27f0 100644 --- a/ext/voaacenc/gstvoaacenc.c +++ b/ext/voaacenc/gstvoaacenc.c @@ -19,16 +19,16 @@ /** * SECTION:element-voaacenc + * @title: voaacenc * - * AAC audio encoder based on vo-aacenc library + * AAC audio encoder based on vo-aacenc library * <ulink url="http://sourceforge.net/projects/opencore-amr/files/vo-aacenc/">vo-aacenc library source file</ulink>. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voaacenc ! filesink location=abc.aac * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/voamrwbenc/gstvoamrwbenc.c b/ext/voamrwbenc/gstvoamrwbenc.c index f84e576a1..c5eae31d7 100644 --- a/ext/voamrwbenc/gstvoamrwbenc.c +++ b/ext/voamrwbenc/gstvoamrwbenc.c @@ -19,19 +19,19 @@ /** * SECTION:element-voamrwbenc + * @title: voamrwbenc * @see_also: #GstAmrWbDec, #GstAmrWbParse * - * AMR wideband encoder based on the + * AMR wideband encoder based on the * <ulink url="http://www.penguin.cz/~utx/amr">reference codec implementation</ulink>. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voamrwbenc ! filesink location=abc.amr * ]| * Please note that the above stream misses the header, that is needed to play * the stream. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/vulkan/gstvulkan.c b/ext/vulkan/gstvulkan.c index 7158895a2..d553e8760 100644 --- a/ext/vulkan/gstvulkan.c +++ b/ext/vulkan/gstvulkan.c @@ -20,6 +20,7 @@ /** * SECTION:plugin-vulkan + * @title: vulkan * * Cross-platform Vulkan plugin. */ diff --git a/ext/vulkan/vkbuffermemory.c b/ext/vulkan/vkbuffermemory.c index e54d15930..d3b199e74 100644 --- a/ext/vulkan/vkbuffermemory.c +++ b/ext/vulkan/vkbuffermemory.c @@ -26,6 +26,7 @@ /** * SECTION:vkbuffermemory + * @title: vkbuffermemory * @short_description: memory subclass for Vulkan buffer memory * @see_also: #GstMemory, #GstAllocator * @@ -420,7 +421,7 @@ gst_vulkan_buffer_memory_init_once (void) /** * gst_is_vulkan_buffer_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstVulkanBufferMemory */ gboolean diff --git a/ext/vulkan/vkbufferpool.c b/ext/vulkan/vkbufferpool.c index 01fb2fba8..df64358e1 100644 --- a/ext/vulkan/vkbufferpool.c +++ b/ext/vulkan/vkbufferpool.c @@ -26,6 +26,7 @@ /** * SECTION:vkbufferpool + * @title: GstVulkanBufferPool * @short_description: buffer pool for #GstVulkanBufferMemory objects * @see_also: #GstBufferPool, #GstVulkanBufferMemory * @@ -33,7 +34,7 @@ * * A #GstVulkanBufferPool is created with gst_vulkan_buffer_pool_new() * - * #GstVulkanBufferPool implements the VideoMeta buffer pool option + * #GstVulkanBufferPool implements the VideoMeta buffer pool option * #GST_BUFFER_POOL_OPTION_VIDEO_META */ diff --git a/ext/vulkan/vkimagememory.c b/ext/vulkan/vkimagememory.c index 074cf9ac0..025443416 100644 --- a/ext/vulkan/vkimagememory.c +++ b/ext/vulkan/vkimagememory.c @@ -26,6 +26,7 @@ /** * SECTION:vkimagememory + * @title: GstVkImageMemory * @short_description: memory subclass for Vulkan image memory * @see_also: #GstMemory, #GstAllocator * @@ -559,7 +560,7 @@ gst_vulkan_image_memory_init_once (void) /** * gst_is_vulkan_image_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is 
a #GstVulkanImageMemory */ gboolean diff --git a/ext/vulkan/vkmemory.c b/ext/vulkan/vkmemory.c index 52855c13f..c21c34daa 100644 --- a/ext/vulkan/vkmemory.c +++ b/ext/vulkan/vkmemory.c @@ -28,11 +28,12 @@ /** * SECTION:vkmemory + * @title: GstVkMemory * @short_description: memory subclass for Vulkan device memory * @see_also: #GstMemory, #GstAllocator * * GstVulkanMemory is a #GstMemory subclass providing support for the mapping of - * Vulkan device memory. + * Vulkan device memory. */ /* WARNING: while suballocation is allowed, nothing prevents aliasing which @@ -347,7 +348,7 @@ gst_vulkan_memory_init_once (void) /** * gst_is_vulkan_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstVulkanMemory */ gboolean diff --git a/ext/vulkan/vksink.c b/ext/vulkan/vksink.c index 586d2861d..c9893deeb 100644 --- a/ext/vulkan/vksink.c +++ b/ext/vulkan/vksink.c @@ -20,6 +20,7 @@ /** * SECTION:element-vulkansink + * @title: vulkansink * * vulkansink renders video frames to a drawable on a local or remote * display using Vulkan. diff --git a/ext/vulkan/vkupload.c b/ext/vulkan/vkupload.c index 4988c640a..8839630e9 100644 --- a/ext/vulkan/vkupload.c +++ b/ext/vulkan/vkupload.c @@ -20,6 +20,7 @@ /** * SECTION:element-vulkanupload + * @title: vulkanupload * * vulkanupload uploads data into Vulkan memory objects. 
*/ diff --git a/ext/vulkan/vkwindow.c b/ext/vulkan/vkwindow.c index ab8c45c5f..6846e3156 100644 --- a/ext/vulkan/vkwindow.c +++ b/ext/vulkan/vkwindow.c @@ -19,7 +19,7 @@ */ /** - * SECTION:gstglwindow + * SECTION:vkwindow * @short_description: window/surface abstraction * @title: GstVulkanWindow * @see_also: #GstGLContext, #GstGLDisplay diff --git a/ext/wayland/gstwaylandsink.c b/ext/wayland/gstwaylandsink.c index 0d974196d..46a710fd0 100644 --- a/ext/wayland/gstwaylandsink.c +++ b/ext/wayland/gstwaylandsink.c @@ -23,18 +23,18 @@ /** * SECTION:element-waylandsink + * @title: waylandsink * * The waylandsink is creating its own window and render the decoded video frames to that. * Setup the Wayland environment as described in * <ulink url="http://wayland.freedesktop.org/building.html">Wayland</ulink> home page. * The current implementaion is based on weston compositor. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v videotestsrc ! waylandsink * ]| test the video rendering in wayland - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/wildmidi/gstwildmidi.c b/ext/wildmidi/gstwildmidi.c index 7f380d6da..89bdcebb3 100644 --- a/ext/wildmidi/gstwildmidi.c +++ b/ext/wildmidi/gstwildmidi.c @@ -21,6 +21,7 @@ /** * SECTION:element-wildmidi + * @title: wildmidi * @see_also: timidity * * This element renders midi-files as audio streams using @@ -29,13 +30,12 @@ * uses the same sound-patches as timidity (it tries the path in $WILDMIDI_CFG, * $HOME/.wildmidirc and /etc/wildmidi.cfg) * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 filesrc location=song.mid ! wildmidi ! alsasink * ]| This example pipeline will parse the midi and render to raw audio which is * played via alsa. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/x265/gstx265enc.c b/ext/x265/gstx265enc.c index b48ed0b07..2f22a4d21 100644 --- a/ext/x265/gstx265enc.c +++ b/ext/x265/gstx265enc.c @@ -22,6 +22,7 @@ /** * SECTION:element-x265enc + * @title: x265enc * * This element encodes raw video into H265 compressed data. * diff --git a/ext/zbar/gstzbar.c b/ext/zbar/gstzbar.c index e84f48c2f..18149eb5e 100644 --- a/ext/zbar/gstzbar.c +++ b/ext/zbar/gstzbar.c @@ -19,64 +19,31 @@ /** * SECTION:element-zbar + * @title: zbar * * Detect bar codes in the video streams and send them as element messages to * the #GstBus if .#GstZBar:message property is %TRUE. * If the .#GstZBar:attach-frame property is %TRUE, the posted barcode message * includes a sample of the frame where the barcode was detected (Since 1.6). * - * The element generate messages named - * <classname>"barcode"</classname>. The structure containes these - * fields: - * <itemizedlist> - * <listitem> - * <para> - * #GstClockTime - * <classname>"timestamp"</classname>: - * the timestamp of the buffer that triggered the message. - * </para> - * </listitem> - * <listitem> - * <para> - * gchar* - * <classname>"type"</classname>: - * the symbol type. - * </para> - * </listitem> - * <listitem> - * <para> - * gchar* - * <classname>"symbol"</classname>: - * the deteted bar code data. - * </para> - * </listitem> - * <listitem> - * <para> - * gint - * <classname>"quality"</classname>: - * an unscaled, relative quantity: larger values are better than smaller + * The element generate messages named`barcode`. The structure containes these fields: + * + * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message. + * * gchar * `type`: the symbol type. + * * gchar * `symbol`: the deteted bar code data. + * * gint `quality`: an unscaled, relative quantity: larger values are better than smaller * values. 
- * </para> - * </listitem> - * <listitem> - * <para> - * GstSample - * <classname>"frame"</classname>: - * the frame in which the barcode message was detected, if + * * GstSample `frame`: the frame in which the barcode message was detected, if * the .#GstZBar:attach-frame property was set to %TRUE (Since 1.6) - * </para> - * </listitem> - * </itemizedlist> * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -m v4l2src ! videoconvert ! zbar ! videoconvert ! xvimagesink * ]| This pipeline will detect barcodes and send them as messages. * |[ * gst-launch-1.0 -m v4l2src ! tee name=t ! queue ! videoconvert ! zbar ! fakesink t. ! queue ! xvimagesink * ]| Same as above, but running the filter on a branch to keep the display in color - * </refsect2> + * */ #ifdef HAVE_CONFIG_H |