| field | value | date |
|---|---|---|
| author | Thibault Saunier <thibault.saunier@osg.samsung.com> | 2017-03-08 15:01:13 -0300 |
| committer | Thibault Saunier <thibault.saunier@osg.samsung.com> | 2017-04-12 12:57:57 -0300 |
| commit | 78022a6e0c05ce482b798cf638cbd3f901a5094e (patch) | |
| tree | 9725b5939d840f6eb3e8f39e032866e82413cb83 | |
| parent | 90f766cc518b4fde651162d6e0c7d190373e3c61 (diff) | |
| download | gstreamer-plugins-bad-78022a6e0c05ce482b798cf638cbd3f901a5094e.tar.gz | |
docs: Port all docstring to gtk-doc markdown
257 files changed, 941 insertions, 1139 deletions
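Most hunks in this commit follow the same mechanical pattern: DocBook tags in the gtk-doc element comments (`<refsect2>`, `<title>`, `<itemizedlist>`, `<programlisting>`) are replaced with gtk-doc markdown (`##` headings, `*` list items, `|[ ... ]|` example blocks), and a `@title:` annotation is added to each `SECTION:` comment. The condensed excerpt below, taken from ext/assrender/gstassrender.c (the first file in the patch, with the gst-launch pipeline abridged), illustrates the conversion; the full patch can be regenerated with `git show 78022a6e0c05ce482b798cf638cbd3f901a5094e` in a local gst-plugins-bad checkout that contains this commit.

```diff
 /**
  * SECTION:element-assrender
+ * @title: assrender
  *
  * Renders timestamped SSA/ASS subtitles on top of a video stream.
  *
- * <refsect2>
- * <title>Example launch line</title>
+ * ## Example launch line
  * |[
  * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! ... ! assrender name=r ! videoconvert ! autovideosink
  * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
- * </refsect2>
+ *
  */
```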
diff --git a/ext/assrender/gstassrender.c b/ext/assrender/gstassrender.c index a3629bfd0..07e33d452 100644 --- a/ext/assrender/gstassrender.c +++ b/ext/assrender/gstassrender.c @@ -20,15 +20,15 @@ /** * SECTION:element-assrender + * @title: assrender * * Renders timestamped SSA/ASS subtitles on top of a video stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/bs2b/gstbs2b.c b/ext/bs2b/gstbs2b.c index dd29983a9..dc597087f 100644 --- a/ext/bs2b/gstbs2b.c +++ b/ext/bs2b/gstbs2b.c @@ -20,17 +20,17 @@ /** * SECTION:element-bs2b + * @title: bs2b * - * Improve headphone listening of stereo audio records using the bs2b library. + * Improve headphone listening of stereo audio records using the bs2b library. * It does so by mixing the left and right channel in a way that simulates * a stereo speaker setup while using headphones. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 audiotestsrc ! "audio/x-raw,channel-mask=(bitmask)0x1" ! interleave name=i ! bs2b ! autoaudiosink audiotestsrc freq=330 ! "audio/x-raw,channel-mask=(bitmask)0x2" ! i. * ]| Play two independent sine test sources and crossfeed them. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/chromaprint/gstchromaprint.c b/ext/chromaprint/gstchromaprint.c index 98b3ae392..6ebadd31a 100644 --- a/ext/chromaprint/gstchromaprint.c +++ b/ext/chromaprint/gstchromaprint.c @@ -23,18 +23,18 @@ /** * SECTION:element-chromaprint + * @title: chromaprint * * The chromaprint element calculates an acoustic fingerprint for an * audio stream which can be used to identify a song and look up * further metadata from the <ulink url="http://acoustid.org/">Acoustid</ulink> * and Musicbrainz databases. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m uridecodebin uri=file:///path/to/song.ogg ! audioconvert ! chromaprint ! fakesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlbasesink.c b/ext/curl/gstcurlbasesink.c index 40c61826d..375c46aef 100644 --- a/ext/curl/gstcurlbasesink.c +++ b/ext/curl/gstcurlbasesink.c @@ -19,14 +19,14 @@ /** * SECTION:element-curlsink + * @title: curlsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * a server (e.g. a HTTP/FTP server). * - * <refsect2> - * <title>Example launch line (upload a JPEG file to an HTTP server)</title> + * ## Example launch line (upload a JPEG file to an HTTP server) * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! 
curlsink \ * file-name=image.jpg \ @@ -35,7 +35,7 @@ * content-type=image/jpeg \ * use-content-length=false * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlfilesink.c b/ext/curl/gstcurlfilesink.c index b4aa1747b..519c135ac 100644 --- a/ext/curl/gstcurlfilesink.c +++ b/ext/curl/gstcurlfilesink.c @@ -19,21 +19,20 @@ /** * SECTION:element-curlfilesink + * @title: curlfilesink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * a local or network drive. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to /home/test/images - * directory)</title> + * ## Example launch line (upload a JPEG file to /home/test/images directory) * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlfilesink \ * file-name=image.jpg \ * location=file:///home/test/images/ * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlftpsink.c b/ext/curl/gstcurlftpsink.c index 92b5c4e08..55b6f6a9a 100644 --- a/ext/curl/gstcurlftpsink.c +++ b/ext/curl/gstcurlftpsink.c @@ -19,21 +19,23 @@ /** * SECTION:element-curlftpsink + * @title: curlftpsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * an FTP server. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to /home/test/images - * directory)</title> + * ## Example launch line + * + * Upload a JPEG file to /home/test/images * directory) + * * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlftpsink \ * file-name=image.jpg \ * location=ftp://192.168.0.1/images/ * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlhttpsink.c b/ext/curl/gstcurlhttpsink.c index a50a057e4..3df255574 100644 --- a/ext/curl/gstcurlhttpsink.c +++ b/ext/curl/gstcurlhttpsink.c @@ -19,14 +19,17 @@ /** * SECTION:element-curlhttpsink + * @title: curlhttpsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * an HTTP server. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to an HTTP server)</title> + * ## Example launch line + * + * Upload a JPEG file to an HTTP server. + * * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlhttpsink \ * file-name=image.jpg \ @@ -35,7 +38,6 @@ * content-type=image/jpeg \ * use-content-length=false * ]| - * </refsect2> */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlsftpsink.c b/ext/curl/gstcurlsftpsink.c index 939ebd8b9..c3549c81f 100644 --- a/ext/curl/gstcurlsftpsink.c +++ b/ext/curl/gstcurlsftpsink.c @@ -19,14 +19,17 @@ /** * SECTION:element-curlsftpsink + * @title: curlsftpsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * a SFTP (SSH File Transfer Protocol) server. * - * <refsect2> - * <title>Example launch line (upload a file to /home/john/sftp_tests/)</title> + * ## Example launch line + * + * Upload a file to /home/john/sftp_tests/ + * * |[ * gst-launch-1.0 filesrc location=/home/jdoe/some.file ! 
curlsftpsink \ * file-name=some.file.backup \ @@ -36,8 +39,6 @@ * ssh-priv-keyfile=/home/jdoe/.ssh/id_rsa \ * create-dirs=TRUE * ]| - * </refsect2> - * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlsmtpsink.c b/ext/curl/gstcurlsmtpsink.c index 07a147430..b9e9bf2d9 100644 --- a/ext/curl/gstcurlsmtpsink.c +++ b/ext/curl/gstcurlsmtpsink.c @@ -19,14 +19,17 @@ /** * SECTION:element-curlsink + * @title: curlsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * * This is a network sink that uses libcurl as a client to upload data to * an SMTP server. * - * <refsect2> - * <title>Example launch line (upload a JPEG file to an SMTP server)</title> + * ## Example launch line + * + * Upload a JPEG file to an SMTP server. + * * |[ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsmtpsink \ * file-name=image.jpg \ @@ -38,7 +41,7 @@ * use-ssl=TRUE \ * insecure=TRUE * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/curl/gstcurlsshsink.c b/ext/curl/gstcurlsshsink.c index b06cbcd5b..cdbff086b 100644 --- a/ext/curl/gstcurlsshsink.c +++ b/ext/curl/gstcurlsshsink.c @@ -19,6 +19,7 @@ /** * SECTION:element-curlsshsink + * @title: curlsshsink * @short_description: sink that uploads data to a server using libcurl * @see_also: * diff --git a/ext/curl/gstcurltlssink.c b/ext/curl/gstcurltlssink.c index 35c1c1c5a..94f9d6544 100644 --- a/ext/curl/gstcurltlssink.c +++ b/ext/curl/gstcurltlssink.c @@ -19,6 +19,7 @@ /** * SECTION:element-curltlssink + * @title: curltlssink * @short_description: sink that uploads data to a server using libcurl * @see_also: * diff --git a/ext/daala/gstdaaladec.c b/ext/daala/gstdaaladec.c index 90fb38f5d..fe3d300f6 100644 --- a/ext/daala/gstdaaladec.c +++ b/ext/daala/gstdaaladec.c @@ -23,6 +23,7 @@ /** * SECTION:element-daaladec + * @title: daaladec * @see_also: daalaenc, oggdemux * * This element decodes daala streams into raw video @@ -30,13 +31,12 @@ * video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org * Foundation</ulink>. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! daaladec ! xvimagesink * ]| This example pipeline will decode an ogg stream and decodes the daala video. Refer to * the daalaenc example to create the ogg file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/daala/gstdaalaenc.c b/ext/daala/gstdaalaenc.c index 7209ddfa3..909f14bc0 100644 --- a/ext/daala/gstdaalaenc.c +++ b/ext/daala/gstdaalaenc.c @@ -23,6 +23,7 @@ /** * SECTION:element-daalaenc + * @title: daalaenc * @see_also: daaladec, oggmux * * This element encodes raw video into a Daala stream. @@ -30,14 +31,13 @@ * video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org * Foundation</ulink>. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 -v videotestsrc num-buffers=1000 ! daalaenc ! oggmux ! filesink location=videotestsrc.ogg * ]| This example pipeline will encode a test video source to daala muxed in an * ogg container. Refer to the daaladec documentation to decode the create * stream. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/dash/gstdashdemux.c b/ext/dash/gstdashdemux.c index fe9106182..ddb32468c 100644 --- a/ext/dash/gstdashdemux.c +++ b/ext/dash/gstdashdemux.c @@ -29,9 +29,10 @@ */ /** * SECTION:element-dashdemux + * @title: dashdemux * * DASH demuxer element. 
- * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 playbin uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd" * ]| diff --git a/ext/dc1394/gstdc1394src.c b/ext/dc1394/gstdc1394src.c index 3ab3026d0..0c92f9231 100644 --- a/ext/dc1394/gstdc1394src.c +++ b/ext/dc1394/gstdc1394src.c @@ -20,6 +20,7 @@ /** * SECTION:element-dc1394src + * @title: dc1394src * * Source for IIDC (Instrumentation & Industrial Digital Camera) firewire * cameras. If several cameras are connected to the system, the desired one @@ -31,8 +32,7 @@ * corresponding video formats are exposed in the capabilities. * The Bayer pattern is left unspecified. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v dc1394src ! videoconvert ! autovideosink * ]| Capture and display frames from the first camera available in the system. @@ -41,7 +41,7 @@ * ! "video/x-bayer,format=gbrg,width=1280,height=960,framerate=15/2" \ * ! bayer2rgb ! videoconvert ! autovideosink * ]| Capture and display frames from a specific camera in the desired format. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/directfb/dfbvideosink.c b/ext/directfb/dfbvideosink.c index a6f433bfa..de3bf9163 100644 --- a/ext/directfb/dfbvideosink.c +++ b/ext/directfb/dfbvideosink.c @@ -20,66 +20,52 @@ /** * SECTION:element-dfbvideosink + * @title: dfbvideosink * * DfbVideoSink renders video frames using the * <ulink url="http://www.directfb.org/">DirectFB</ulink> library. * Rendering can happen in two different modes : - * <itemizedlist> - * <listitem> - * <para> - * Standalone: this mode will take complete control of the monitor forcing + * + * * Standalone: this mode will take complete control of the monitor forcing * <ulink url="http://www.directfb.org/">DirectFB</ulink> to fullscreen layout. * This is convenient to test using the gst-launch-1.0 command line tool or * other simple applications. It is possible to interrupt playback while * being in this mode by pressing the Escape key. - * </para> - * <para> * This mode handles navigation events for every input device supported by * the <ulink url="http://www.directfb.org/">DirectFB</ulink> library, it will * look for available video modes in the fb.modes file and try to switch - * the framebuffer video mode to the most suitable one. Depending on + * the framebuffer video mode to the most suitable one. Depending on * hardware acceleration capabilities the element will handle scaling or not. * If no acceleration is available it will do clipping or centering of the * video frames respecting the original aspect ratio. - * </para> - * </listitem> - * <listitem> - * <para> - * Embedded: this mode will render video frames in a + * + * * Embedded: this mode will render video frames in a * #GstDfbVideoSink:surface provided by the * application developer. This is a more advanced usage of the element and - * it is required to integrate video playback in existing + * it is required to integrate video playback in existing * <ulink url="http://www.directfb.org/">DirectFB</ulink> applications. - * </para> - * <para> * When using this mode the element just renders to the - * #GstDfbVideoSink:surface provided by the + * #GstDfbVideoSink:surface provided by the * application, that means it won't handle navigation events and won't resize * the #GstDfbVideoSink:surface to fit video * frames geometry. 
Application has to implement the necessary code to grab * informations about the negotiated geometry and resize there * #GstDfbVideoSink:surface accordingly. - * </para> - * </listitem> - * </itemizedlist> - * For both modes the element implements a buffer pool allocation system to - * optimize memory allocation time and handle reverse negotiation. Indeed if + * + * For both modes the element implements a buffer pool allocation system to + * optimize memory allocation time and handle reverse negotiation. Indeed if * you insert an element like videoscale in the pipeline the video sink will * negotiate with it to try get a scaled video for either the fullscreen layout * or the application provided external #GstDfbVideoSink:surface. * - * <refsect2> - * <title>Example application</title> - * <para> + * ## Example application + * * <include xmlns="http://www.w3.org/2003/XInclude" href="element-dfb-example.xml" /> - * </para> - * </refsect2> - * <refsect2> - * <title>Example pipelines</title> + * + * ## Example pipelines * |[ * gst-launch-1.0 -v videotestsrc ! dfbvideosink hue=20000 saturation=40000 brightness=25000 * ]| test the colorbalance interface implementation in dfbvideosink - * </refsect2> */ #ifdef HAVE_CONFIG_H @@ -810,7 +796,7 @@ gst_dfbvideosink_setup (GstDfbVideoSink * dfbvideosink) dfbvideosink->backbuffer = FALSE; dfbvideosink->pixel_format = DSPF_UNKNOWN; - /* If we do it all by ourself we create the DirectFB context, get the + /* If we do it all by ourself we create the DirectFB context, get the primary layer and use a fullscreen configuration */ if (!dfbvideosink->ext_surface) { GST_DEBUG_OBJECT (dfbvideosink, "no external surface, taking over " diff --git a/ext/dts/gstdtsdec.c b/ext/dts/gstdtsdec.c index 1c91ce195..2f786f738 100644 --- a/ext/dts/gstdtsdec.c +++ b/ext/dts/gstdtsdec.c @@ -20,18 +20,18 @@ /** * SECTION:element-dtsdec + * @title: dtsdec * * Digital Theatre System (DTS) audio decoder - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 dvdreadsrc title=1 ! mpegpsdemux ! dtsdec ! audioresample ! audioconvert ! alsasink * ]| Play a DTS audio track from a dvd. * |[ * gst-launch-1.0 filesrc location=abc.dts ! dtsdec ! audioresample ! audioconvert ! alsasink * ]| Decode a standalone file and play it. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/faac/gstfaac.c b/ext/faac/gstfaac.c index 7e3cf4148..8b861edce 100644 --- a/ext/faac/gstfaac.c +++ b/ext/faac/gstfaac.c @@ -20,16 +20,16 @@ /** * SECTION:element-faac + * @title: faac * @see_also: faad * * faac encodes raw audio to AAC (MPEG-4 part 3) streams. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc wave=sine num-buffers=100 ! audioconvert ! faac ! matroskamux ! filesink location=sine.mkv * ]| Encode a sine beep as aac and write to matroska container. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/faad/gstfaad.c b/ext/faad/gstfaad.c index 09d927c82..0c6122fdb 100644 --- a/ext/faad/gstfaad.c +++ b/ext/faad/gstfaad.c @@ -20,19 +20,19 @@ /** * SECTION:element-faad + * @title: faad * @seealso: faac * * faad decodes AAC (MPEG-4 part 3) stream. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=example.mp4 ! qtdemux ! faad ! audioconvert ! audioresample ! autoaudiosink * ]| Play aac from mp4 file. * |[ * gst-launch-1.0 filesrc location=example.adts ! faad ! audioconvert ! audioresample ! 
autoaudiosink * ]| Play standalone aac bitstream. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/fluidsynth/gstfluiddec.c b/ext/fluidsynth/gstfluiddec.c index 62262ecd7..a98b5f90b 100644 --- a/ext/fluidsynth/gstfluiddec.c +++ b/ext/fluidsynth/gstfluiddec.c @@ -21,19 +21,19 @@ /** * SECTION:element-fluiddec + * @title: fluiddec * @see_also: timidity, wildmidi * * This element renders midi-events as audio streams using * <ulink url="http://fluidsynth.sourceforge.net//">Fluidsynth</ulink>. * It offers better sound quality compared to the timidity or wildmidi element. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 filesrc location=song.mid ! midiparse ! fluiddec ! pulsesink * ]| This example pipeline will parse the midi and render to raw audio which is * played via pulseaudio. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglbumper.c b/ext/gl/gstglbumper.c index 9fdb4a079..8dd812cb8 100644 --- a/ext/gl/gstglbumper.c +++ b/ext/gl/gstglbumper.c @@ -21,16 +21,16 @@ /** * SECTION:element-glbumper + * @title: glbumper * * Bump mapping using the normal method. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! glupload ! glbumper location=normalmap.bmp ! glimagesink * ]| A pipeline to test normal mapping. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglcolorbalance.c b/ext/gl/gstglcolorbalance.c index 33b5a50a3..c2482a3d6 100644 --- a/ext/gl/gstglcolorbalance.c +++ b/ext/gl/gstglcolorbalance.c @@ -22,16 +22,16 @@ /** * SECTION:element-glcolorbalance + * @title: glcolorbalance * * Adjusts brightness, contrast, hue, saturation on a video stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! glupload ! glcolorbalance saturation=0.0 ! glcolorconvert ! gldownload ! ximagesink * ]| This pipeline converts the image to black and white by setting the * saturation to 0.0. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglcolorscale.c b/ext/gl/gstglcolorscale.c index 2ca5480e9..b7b0709dc 100644 --- a/ext/gl/gstglcolorscale.c +++ b/ext/gl/gstglcolorscale.c @@ -20,17 +20,15 @@ /** * SECTION:element-glcolorscale + * @title: glcolorscale * * video frame scaling and colorspace conversion. * - * <refsect2> - * <title>Scaling and Color space conversion</title> - * <para> + * ## Scaling and Color space conversion + * * Equivalent to glupload ! gldownload. - * </para> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! video/x-raw ! glcolorscale ! ximagesink * ]| A pipeline to test colorspace conversion. @@ -40,7 +38,7 @@ * video/x-raw, width=320, height=240, format=YV12 ! videoconvert ! autovideosink * ]| A pipeline to test hardware scaling and colorspace conversion. * FBO and GLSL are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgldeinterlace.c b/ext/gl/gstgldeinterlace.c index 568f26702..dbc2c24d4 100644 --- a/ext/gl/gstgldeinterlace.c +++ b/ext/gl/gstgldeinterlace.c @@ -20,16 +20,16 @@ /** * SECTION:element-deinterlace + * @title: deinterlace * * Deinterlacing using based on fragment shaders. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! gldeinterlace ! 
glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgldifferencematte.c b/ext/gl/gstgldifferencematte.c index b50e69bf7..aa1092a17 100644 --- a/ext/gl/gstgldifferencematte.c +++ b/ext/gl/gstgldifferencematte.c @@ -20,16 +20,16 @@ /** * SECTION:element-gldifferencematte. + * @title: gldifferencematte. * * Saves a background frame and replace it with a pixbuf. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! gldifferencemate location=backgroundimagefile ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgleffects.c b/ext/gl/gstgleffects.c index 2af3368af..d8a7da107 100644 --- a/ext/gl/gstgleffects.c +++ b/ext/gl/gstgleffects.c @@ -20,16 +20,16 @@ /** * SECTION:element-gleffects. + * @title: gleffects. * * GL Shading Language effects. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! gleffects effect=5 ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfilterapp.c b/ext/gl/gstglfilterapp.c index 925373b89..fc2e6ac5e 100644 --- a/ext/gl/gstglfilterapp.c +++ b/ext/gl/gstglfilterapp.c @@ -20,20 +20,18 @@ /** * SECTION:element-glfilterapp + * @title: glfilterapp * * The resize and redraw callbacks can be set from a client code. * - * <refsect2> - * <title>CLient callbacks</title> - * <para> - * The graphic scene can be written from a client code through the + * ## CLient callbacks + * + * The graphic scene can be written from a client code through the * two glfilterapp properties. - * </para> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Examples * see gst-plugins-gl/tests/examples/generic/recordgraphic - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfiltercube.c b/ext/gl/gstglfiltercube.c index 735d45e5e..427782afd 100644 --- a/ext/gl/gstglfiltercube.c +++ b/ext/gl/gstglfiltercube.c @@ -20,11 +20,11 @@ /** * SECTION:element-glfiltercube + * @title: glfiltercube * * The resize and redraw callbacks can be set from a client code. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! glfiltercube ! glimagesink * ]| A pipeline to mpa textures on the 6 cube faces.. @@ -37,7 +37,7 @@ * gst-launch-1.0 -v videotestsrc ! video/x-raw, width=640, height=480 ! glfiltercube ! glimagesink * ]| Resize scene before drawing the cube. * The scene size is greater than the input video size. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfilterglass.c b/ext/gl/gstglfilterglass.c index 254199dd5..ea20239a4 100644 --- a/ext/gl/gstglfilterglass.c +++ b/ext/gl/gstglfilterglass.c @@ -21,11 +21,11 @@ /** * SECTION:element-glfilterglass + * @title: glfilterglass * * Map textures on moving glass. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! glfilterglass ! glimagesink * ]| A pipeline inspired from http://www.mdk.org.pl/2007/11/17/gl-colorspace-conversions @@ -33,7 +33,7 @@ * |[ * gst-launch-1.0 -v videotestsrc ! glfilterglass ! video/x-raw, width=640, height=480 ! glimagesink * ]| The scene is greater than the input size. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfilterreflectedscreen.c b/ext/gl/gstglfilterreflectedscreen.c index fb1533890..62ccd9bf7 100644 --- a/ext/gl/gstglfilterreflectedscreen.c +++ b/ext/gl/gstglfilterreflectedscreen.c @@ -20,15 +20,15 @@ /** * SECTION:element-glfilterreflectedscreen + * @title: glfilterreflectedscreen * * Map Video Texture upon a screen, on a reflecting surface * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glfilterreflectedscreen ! glimagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglfiltershader.c b/ext/gl/gstglfiltershader.c index f3434690b..8f5a990cf 100644 --- a/ext/gl/gstglfiltershader.c +++ b/ext/gl/gstglfiltershader.c @@ -21,11 +21,11 @@ /** * SECTION:element-glshader + * @title: glshader * * OpenGL fragment shader filter * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glshader fragment="\"`cat myshader.frag`\"" ! glimagesink * ]| @@ -45,13 +45,12 @@ * uniform float time; * uniform float width; * uniform float height; - * + * * void main () { * gl_FragColor = texture2D( tex, v_texcoord ); * } * ]| * - * </refsect2> */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/ext/gl/gstglimagesink.c b/ext/gl/gstglimagesink.c index 1c8d314f2..f6a61ac92 100644 --- a/ext/gl/gstglimagesink.c +++ b/ext/gl/gstglimagesink.c @@ -23,6 +23,7 @@ /** * SECTION:element-glimagesink + * @title: glimagesink * * glimagesink renders video frames to a drawable on a local or remote * display using OpenGL. This element can receive a Window ID from the @@ -34,28 +35,23 @@ * See the #GstGLDisplay documentation for a list of environment variables that * can override window/platform detection. * - * <refsect2> - * <title>Scaling</title> - * <para> + * ## Scaling + * * Depends on the driver, OpenGL handles hardware accelerated * scaling of video frames. This means that the element will just accept * incoming video frames no matter their geometry and will then put them to the * drawable scaling them on the fly. Using the #GstGLImageSink:force-aspect-ratio * property it is possible to enforce scaling with a constant aspect ratio, * which means drawing black borders around the video frame. - * </para> - * </refsect2> - * <refsect2> - * <title>Events</title> - * <para> + * + * ## Events + * * Through the gl thread, glimagesink handle some events coming from the drawable * to manage its appearance even when the data is not flowing (GST_STATE_PAUSED). * That means that even when the element is paused, it will receive expose events * from the drawable and draw the latest frame with correct borders/aspect-ratio. - * </para> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc ! video/x-raw ! glimagesink * ]| A pipeline to test hardware scaling. @@ -80,7 +76,7 @@ * ]| The graphic FPS scene can be greater than the input video FPS. * The graphic scene can be written from a client code through the * two glfilterapp properties. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglmosaic.c b/ext/gl/gstglmosaic.c index fcd4926a7..77aec599c 100644 --- a/ext/gl/gstglmosaic.c +++ b/ext/gl/gstglmosaic.c @@ -20,14 +20,14 @@ /** * SECTION:element-glmosaic + * @title: glmosaic * * glmixer sub element. N gl sink pads to 1 source pad. * N + 1 OpenGL contexts shared together. 
* N <= 6 because the rendering is more a like a cube than a mosaic * Each opengl input stream is rendered on a cube face * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! video/x-raw, format=YUY2 ! queue ! glmosaic name=m ! glimagesink \ * videotestsrc pattern=12 ! video/x-raw, format=I420, framerate=5/1, width=100, height=200 ! queue ! m. \ @@ -37,7 +37,7 @@ * videotestsrc ! gleffects effect=6 ! queue ! m. * ]| * FBO (Frame Buffer Object) is required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgloverlay.c b/ext/gl/gstgloverlay.c index 8c21bac77..173d11820 100644 --- a/ext/gl/gstgloverlay.c +++ b/ext/gl/gstgloverlay.c @@ -20,16 +20,16 @@ /** * SECTION:element-gloverlay + * @title: gloverlay * * Overlay GL video texture with a PNG image * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! gloverlay location=image.jpg ! glimagesink * ]| * FBO (Frame Buffer Object) is required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglstereomix.c b/ext/gl/gstglstereomix.c index 18e7111a7..78a2934fa 100644 --- a/ext/gl/gstglstereomix.c +++ b/ext/gl/gstglstereomix.c @@ -23,6 +23,7 @@ /** * SECTION:element-glstereomix + * @title: glstereomix * * Combine 2 input streams to produce a stereoscopic output * stream. Input views are taken from the left pad and right pad @@ -34,8 +35,7 @@ * The multiview representation on the output is chosen according to * the downstream caps. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 -v videotestsrc pattern=ball name=left \ * videotestsrc name=right glstereomix name=mix \ @@ -52,10 +52,10 @@ * right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \ * mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=top-bottom ! \ * glcolorconvert ! gldownload ! queue ! x264enc ! h264parse ! \ - * mp4mux ! progressreport ! filesink location=output.mp4 + * mp4mux ! progressreport ! filesink location=output.mp4 * ]| Mix the input from a camera to the left view, and videotestsrc to the right view, * and encode as a top-bottom frame packed H.264 video. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/ext/gl/gstglstereosplit.c b/ext/gl/gstglstereosplit.c index 5650ab4d6..933f3c593 100644 --- a/ext/gl/gstglstereosplit.c +++ b/ext/gl/gstglstereosplit.c @@ -20,16 +20,16 @@ /** * SECTION:element-glstereosplit + * @title: glstereosplit * * Receive a stereoscopic video stream and split into left/right * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink * ]| * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgltestsrc.c b/ext/gl/gstgltestsrc.c index 83218b4ac..e9c7df85a 100644 --- a/ext/gl/gstgltestsrc.c +++ b/ext/gl/gstgltestsrc.c @@ -23,21 +23,19 @@ /** * SECTION:element-gltestsrc + * @title: gltestsrc * - * <refsect2> - * <para> * The gltestsrc element is used to produce test video texture. * The video test produced can be controlled with the "pattern" * property. - * </para> - * <title>Example launch line</title> - * <para> - * <programlisting> + * + * ## Example launch line + * + * |[ * gst-launch-1.0 -v gltestsrc pattern=smpte ! glimagesink - * </programlisting> + * ]| * Shows original SMPTE color bars in a window. 
- * </para> - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstgltransformation.c b/ext/gl/gstgltransformation.c index f6c9e3f26..729a9f653 100644 --- a/ext/gl/gstgltransformation.c +++ b/ext/gl/gstgltransformation.c @@ -21,11 +21,11 @@ /** * SECTION:element-gltransformation + * @title: gltransformation * * Transforms video on the GPU. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 gltestsrc ! gltransformation rotation-z=45 ! glimagesink * ]| A pipeline to rotate by 45 degrees @@ -38,7 +38,7 @@ * |[ * gst-launch-1.0 gltestsrc ! gltransformation rotation-x=-45 ortho=True ! glimagesink * ]| Rotate the video around the X-Axis by -45° with an orthographic projection - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglvideoflip.c b/ext/gl/gstglvideoflip.c index 5d2dac5ee..d5bb2ac57 100644 --- a/ext/gl/gstglvideoflip.c +++ b/ext/gl/gstglvideoflip.c @@ -20,15 +20,15 @@ /** * SECTION:element-glvideo_flip + * @title: glvideo_flip * * Transforms video on the GPU. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glvideoflip method=clockwise ! glimagesinkelement * ]| This pipeline flips the test image 90 degrees clockwise. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglvideomixer.c b/ext/gl/gstglvideomixer.c index ebd682752..5df126d87 100644 --- a/ext/gl/gstglvideomixer.c +++ b/ext/gl/gstglvideomixer.c @@ -20,13 +20,13 @@ /** * SECTION:element-glvideomixer + * @title: glvideomixer * * Composites a number of streams into a single output scene using OpenGL in * a similar fashion to compositor and videomixer. See the compositor plugin * for documentation about the #GstGLVideoMixerPad properties. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 glvideomixer name=m ! glimagesink \ * videotestsrc ! video/x-raw, format=YUY2 ! glupload ! glcolorconvert ! m. \ @@ -36,7 +36,7 @@ * videotestsrc ! glupload ! glfiltercube ! queue ! m. \ * videotestsrc ! glupload ! gleffects effect=6 ! queue ! m.gst-launch-1.0 glvideomixer name=m ! glimagesink \ * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstglviewconvert.c b/ext/gl/gstglviewconvert.c index 617df449d..670305c81 100644 --- a/ext/gl/gstglviewconvert.c +++ b/ext/gl/gstglviewconvert.c @@ -22,14 +22,14 @@ /** * SECTION:element-glviewconvert + * @title: glviewconvert * * Convert stereoscopic video between different representations using fragment shaders. * * The element can use either property settings or caps negotiation to choose the * input and output formats to process. * - * <refsect2> - * <title>Examples</title> + * ## Examples * |[ * gst-launch-1.0 videotestsrc ! glupload ! glviewconvert ! glimagesink * ]| Simple placebo example demonstrating identity passthrough of mono video @@ -39,7 +39,7 @@ * ]| Force re-interpretation of the input checkers pattern as a side-by-side stereoscopic * image and display in glimagesink. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gl/gstopengl.c b/ext/gl/gstopengl.c index 4a8c4d6a1..75f8d3332 100644 --- a/ext/gl/gstopengl.c +++ b/ext/gl/gstopengl.c @@ -23,20 +23,20 @@ /** * SECTION:plugin-opengl + * @title: GstOpengl * * Cross-platform OpenGL plugin. 
- * <refsect2> - * <title>Debugging</title> - * </refsect2> - * <refsect2> - * <title>Examples</title> + * + * ## Debugging + * + * ## Examples * |[ * gst-launch-1.0 --gst-debug=gldisplay:3 videotestsrc ! glimagesink * ]| A debugging pipeline. |[ * GST_DEBUG=gl*:6 gst-launch-1.0 videotestsrc ! glimagesink * ]| A debugging pipelines related to shaders. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/gtk/gstgtkbasesink.c b/ext/gtk/gstgtkbasesink.c index 44c8cbd46..843c97f1b 100644 --- a/ext/gtk/gstgtkbasesink.c +++ b/ext/gtk/gstgtkbasesink.c @@ -20,6 +20,7 @@ /** * SECTION:gtkgstsink + * @title: GstGtkBaseSink * */ diff --git a/ext/gtk/gstgtkglsink.c b/ext/gtk/gstgtkglsink.c index 2bdd331c6..56326882a 100644 --- a/ext/gtk/gstgtkglsink.c +++ b/ext/gtk/gstgtkglsink.c @@ -19,8 +19,8 @@ */ /** - * SECTION:gstgtkglsink - * + * SECTION:element-gtkglsink + * @title: gtkglsink */ #ifdef HAVE_CONFIG_H diff --git a/ext/gtk/gstgtksink.c b/ext/gtk/gstgtksink.c index e9f9d0cc7..ba8ea33ca 100644 --- a/ext/gtk/gstgtksink.c +++ b/ext/gtk/gstgtksink.c @@ -19,7 +19,8 @@ */ /** - * SECTION:gtkgstsink + * SECTION:element-gtkgstsink + * @title: gtkgstsink * */ diff --git a/ext/gtk/gtkgstglwidget.c b/ext/gtk/gtkgstglwidget.c index e780ebd38..ea0fe2f8a 100644 --- a/ext/gtk/gtkgstglwidget.c +++ b/ext/gtk/gtkgstglwidget.c @@ -41,6 +41,7 @@ /** * SECTION:gtkgstglwidget + * @title: GtkGstGlWidget * @short_description: a #GtkGLArea that renders GStreamer video #GstBuffers * @see_also: #GtkGLArea, #GstBuffer * diff --git a/ext/gtk/gtkgstwidget.c b/ext/gtk/gtkgstwidget.c index 5fe238a54..a936210ba 100644 --- a/ext/gtk/gtkgstwidget.c +++ b/ext/gtk/gtkgstwidget.c @@ -29,6 +29,7 @@ /** * SECTION:gtkgstwidget + * @title: GtkGstWidget * @short_description: a #GtkWidget that renders GStreamer video #GstBuffers * @see_also: #GtkDrawingArea, #GstBuffer * diff --git a/ext/hls/gsthlsdemux.c b/ext/hls/gsthlsdemux.c index b75e600ca..ea49fd0ee 100644 --- a/ext/hls/gsthlsdemux.c +++ b/ext/hls/gsthlsdemux.c @@ -26,15 +26,15 @@ */ /** * SECTION:element-hlsdemux + * @title: hlsdemux * * HTTP Live Streaming demuxer element. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 souphttpsrc location=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8 ! hlsdemux ! decodebin ! videoconvert ! videoscale ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/hls/gsthlssink.c b/ext/hls/gsthlssink.c index b56ef6c8f..9ae6ca6d3 100644 --- a/ext/hls/gsthlssink.c +++ b/ext/hls/gsthlssink.c @@ -19,15 +19,15 @@ /** * SECTION:element-hlssink + * @title: hlssink * * HTTP Live Streaming sink/server * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc is-live=true ! x264enc ! mpegtsmux ! hlssink max-files=5 * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/ext/iqa/iqa.c b/ext/iqa/iqa.c index de09e81f2..b91275f6f 100644 --- a/ext/iqa/iqa.c +++ b/ext/iqa/iqa.c @@ -19,6 +19,7 @@ /** * SECTION:element-iqa + * @title: iqa * @short_description: Image Quality Assessment plugin. * * IQA will perform full reference image quality assessment, with the @@ -48,13 +49,12 @@ * sink_2\=\(double\)0.0082939683976297474\;", * time=(guint64)0; * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m uridecodebin uri=file:///test/file/1 ! iqa name=iqa do-dssim=true \ * ! videoconvert ! 
autovideosink uridecodebin uri=file:///test/file/2 ! iqa. * ]| This pipeline will output messages to the console for each set of compared frames. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/kate/gstkatedec.c b/ext/kate/gstkatedec.c index 88a56660d..65b29bfdb 100644 --- a/ext/kate/gstkatedec.c +++ b/ext/kate/gstkatedec.c @@ -45,33 +45,29 @@ /** * SECTION:element-katedec + * @title: katedec * @see_also: oggdemux * - * <refsect2> - * <para> * This element decodes Kate streams * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * for text based data, such as subtitles. Any number of kate streams can be * embedded in an Ogg stream. - * </para> - * <para> + * * libkate (see above url) is needed to build this plugin. - * </para> - * <title>Example pipeline</title> - * <para> + * + * ## Example pipeline + * * This explicitely decodes a Kate stream: - * <programlisting> + * |[ * gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE - * </programlisting> - * </para> - * <para> + * ]| + * * This will automatically detect and use any Kate streams multiplexed * in an Ogg stream: - * <programlisting> + * |[ * gst-launch-1.0 playbin uri=file:///tmp/test.ogg - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/kate/gstkateenc.c b/ext/kate/gstkateenc.c index 12bd1c842..5b109c988 100644 --- a/ext/kate/gstkateenc.c +++ b/ext/kate/gstkateenc.c @@ -46,26 +46,23 @@ /** * SECTION:element-kateenc + * @title: kateenc * @see_also: oggmux * - * <refsect2> - * <para> * This element encodes Kate streams * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * for text based data, such as subtitles. Any number of kate streams can be * embedded in an Ogg stream. - * </para> - * <para> + * * libkate (see above url) is needed to build this plugin. - * </para> - * <title>Example pipeline</title> - * <para> + * + * ## Example pipeline + * * This encodes a DVD SPU track to a Kate stream: - * <programlisting> + * |[ * gst-launch-1.0 dvdreadsrc ! dvddemux ! dvdsubparse ! kateenc category=spu-subtitles ! oggmux ! filesink location=test.ogg - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ /* FIXME: diff --git a/ext/kate/gstkateparse.c b/ext/kate/gstkateparse.c index 0cb9db0f3..83e3270d9 100644 --- a/ext/kate/gstkateparse.c +++ b/ext/kate/gstkateparse.c @@ -21,40 +21,35 @@ /** * SECTION:element-kateparse + * @title: kateparse * @short_description: parses kate streams * @see_also: katedec, vorbisparse, oggdemux, theoraparse * - * <refsect2> - * <para> * The kateparse element will parse the header packets of the Kate * stream and put them as the streamheader in the caps. This is used in the * multifdsink case where you want to stream live kate streams to multiple * clients, each client has to receive the streamheaders first before they can * consume the kate packets. - * </para> - * <para> + * * This element also makes sure that the buffers that it pushes out are properly * timestamped and that their offset and offset_end are set. The buffers that * kateparse outputs have all of the metadata that oggmux expects to receive, * which allows you to (for example) remux an ogg/kate file. - * </para> - * <title>Example pipelines</title> - * <para> - * <programlisting> + * + * ## Example pipelines + * + * |[ * gst-launch-1.0 -v filesrc location=kate.ogg ! oggdemux ! kateparse ! 
fakesink - * </programlisting> + * ]| * This pipeline shows that the streamheader is set in the caps, and that each * buffer has the timestamp, duration, offset, and offset_end set. - * </para> - * <para> - * <programlisting> + * + * |[ * gst-launch-1.0 filesrc location=kate.ogg ! oggdemux ! kateparse \ * ! oggmux ! filesink location=kate-remuxed.ogg - * </programlisting> + * ]| * This pipeline shows remuxing. kate-remuxed.ogg might not be exactly the same * as kate.ogg, but they should produce exactly the same decoded data. - * </para> - * </refsect2> * */ diff --git a/ext/kate/gstkatetag.c b/ext/kate/gstkatetag.c index e280917bc..4b2ec5706 100644 --- a/ext/kate/gstkatetag.c +++ b/ext/kate/gstkatetag.c @@ -21,46 +21,41 @@ /** * SECTION:element-katetag + * @title: katetag * @see_also: #oggdemux, #oggmux, #kateparse, #GstTagSetter * @short_description: retags kate streams * - * <refsect2> - * <para> * The katetag element can change the tag contained within a raw * kate stream. Specifically, it modifies the comments header packet * of the kate stream, as well as the language and category of the * kate stream. - * </para> - * <para> + * * The element will also process the stream as the #kateparse element does * so it can be used when remuxing an Ogg Kate stream, without additional * elements. - * </para> - * <para> + * * Applications can set the tags to write using the #GstTagSetter interface. * Tags contained within the kate stream will be picked up * automatically (and merged according to the merge mode set via the tag * setter interface). - * </para> - * <title>Example pipelines</title> - * <para> + * + * ## Example pipelines + * * This element is only useful with gst-launch-1.0 for modifying the language * and/or category (which are properties of the stream located in the kate * beginning of stream header), because it does not support setting the tags * on a #GstTagSetter interface. Conceptually, the element will usually be * used like: - * <programlisting> + * |[ * gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag ! oggmux ! filesink location=bar.ogg - * </programlisting> - * </para> - * <para> + * ]| + * * This pipeline will set the language and category of the stream to the * given values: - * <programlisting> + * |[ * gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag language=pt_BR category=subtitles ! oggmux ! filesink location=bar.ogg - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/kate/gstkatetiger.c b/ext/kate/gstkatetiger.c index e4f3ca934..21970c9f6 100644 --- a/ext/kate/gstkatetiger.c +++ b/ext/kate/gstkatetiger.c @@ -45,32 +45,29 @@ /** * SECTION:element-tiger + * @title: tiger * @see_also: katedec * - * <refsect2> - * <para> * This element decodes and renders Kate streams * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * for text based data, such as subtitles. Any number of kate streams can be * embedded in an Ogg stream. - * </para> - * <para> + * * libkate (see above url) and <ulink url="http://libtiger.googlecode.com/">libtiger</ulink> * are needed to build this element. - * </para> - * <title>Example pipeline</title> - * <para> + * + * ## Example pipeline + * * This pipeline renders a Kate stream on top of a Theora video multiplexed * in the same stream: - * <programlisting> + * |[ * gst-launch-1.0 \ * filesrc location=video.ogg ! oggdemux name=demux \ * demux. ! queue ! theoradec ! videoconvert ! tiger name=tiger \ * demux. ! queue ! kateparse ! tiger. \ * tiger. 
! videoconvert ! autovideosink - * </programlisting> - * </para> - * </refsect2> + * ]| + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/ladspa/gstladspa.c b/ext/ladspa/gstladspa.c index 624cbd81a..8fea1096a 100644 --- a/ext/ladspa/gstladspa.c +++ b/ext/ladspa/gstladspa.c @@ -22,6 +22,7 @@ /** * SECTION:element-ladspa + * @title: ladspa * @short_description: bridge for LADSPA (Linux Audio Developer's Simple Plugin API) * @see_also: #GstAudioConvert #GstAudioResample, #GstAudioTestSrc, #GstAutoAudioSink * @@ -32,8 +33,7 @@ * element classification. The functionality you get depends on the LADSPA plugins * you have installed. * - * <refsect2> - * <title>Example LADSPA line without this plugins</title> + * ## Example LADSPA line without this plugins * |[ * (padsp) listplugins * (padsp) analyseplugin cmt.so amp_mono @@ -41,16 +41,13 @@ * (padsp) applyplugin testin.wav testout.wav cmt.so amp_mono 2 * gst-launch-1.0 playbin uri=file://"$PWD"/testout.wav * ]| Decode any audio file into wav with the format expected for the specific ladspa plugin to be applied, apply the ladspa filter and play it. - * </refsect2> * * Now with this plugin: * - * <refsect2> - * <title>Example LADSPA line with this plugins</title> + * ## Example LADSPA line with this plugins * |[ * gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4 * ]| Get audio input, filter it through CAPS Plate and TAP Stereo Echo, play it and show a visualization (recommended hearphones). - * </refsect2> * * In case you wonder the plugin naming scheme, quoting ladspa.h: * "Plugin types should be identified by file and label rather than by @@ -61,60 +58,52 @@ * on top of the audio in and out one, so some parameters are readable too. * * You can see the listing of plugins available with: - * <refsect2> - * <title>Inspecting the plugins list</title> + * + * ## Inspecting the plugins list * |[ * gst-inspect ladspa * ]| List available LADSPA plugins on gstreamer. - * </refsect2> * * You can see the parameters of any plugin with: - * <refsect2> - * <title>Inspecting the plugins</title> + * + * ## Inspecting the plugins * |[ * gst-inspect ladspa-retro-flange-1208-so-retroflange * ]| List details of the plugin, parameters, range and defaults included. - * </refsect2> * * The elements categorize in: - * <itemizedlist> - * <listitem><para>Filter/Effect/Audio/LADSPA:</para> - * <refsect2> - * <title>Example Filter/Effect/Audio/LADSPA line with this plugins</title> + * + * * Filter/Effect/Audio/LADSPA: + * + * ## Example Filter/Effect/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 filesrc location="$myfile" ! decodebin ! audioconvert ! audioresample ! ladspa-calf-so-reverb decay-time=15 high-frq-damp=20000 room-size=5 diffusion=1 wet-amount=2 dry-amount=2 pre-delay=50 bass-cut=20000 treble-cut=20000 ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! autoaudiosink * ]| Decode any audio file, filter it through Calf Reverb LADSPA then TAP Stereo Echo, and play it. - * </refsect2> - * </listitem> - * <listitem><para>Source/Audio/LADSPA:</para> - * <refsect2> - * <title>Example Source/Audio/LADSPA line with this plugins</title> + * + * * Source/Audio/LADSPA: + * + * ## Example Source/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 ladspasrc-sine-so-sine-fcac frequency=220 amplitude=100 ! audioconvert ! 
autoaudiosink * ]| Generate a sine wave with Sine Oscillator (Freq:control, Amp:control) and play it. - * </refsect2> - * <refsect2> - * <title>Example Source/Audio/LADSPA line with this plugins</title> + * + * ## Example Source/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 ladspasrc-caps-so-click bpm=240 volume=1 ! autoaudiosink * ]| Generate clicks with CAPS Click - Metronome at 240 beats per minute and play it. - * </refsect2> - * <refsect2> - * <title>Example Source/Audio/LADSPA line with this plugins</title> + * + * ## Example Source/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 ladspasrc-random-1661-so-random-fcsc-oa ! ladspa-cmt-so-amp-mono gain=1.5 ! ladspa-caps-so-plate ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! wavescope ! videoconvert ! autovideosink * ]| Generate random wave, filter it trhough Mono Amplifier and Versatile Plate Reverb, and play, while showing, it. - * </refsect2> - * </listitem> - * <listitem><para>Sink/Audio/LADSPA:</para> - * <refsect2> - * <title>Example Sink/Audio/LADSPA line with this plugins</title> + * + * * Sink/Audio/LADSPA: + * + * ## Example Sink/Audio/LADSPA line with this plugins * |[ * gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4 * ]| Get audio input, filter it trhough Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitily anulate audio with Null (Audio Output), and play a visualization (recommended hearphones). - * </refsect2> - * </listitem> - * </itemizedlist> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/libde265/libde265-dec.c b/ext/libde265/libde265-dec.c index ab376c505..109edadba 100644 --- a/ext/libde265/libde265-dec.c +++ b/ext/libde265/libde265-dec.c @@ -21,15 +21,15 @@ /** * SECTION:element-libde265dec + * @title: libde265dec * * Decodes HEVC/H.265 video. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=bitstream.hevc ! 'video/x-hevc,stream-format=byte-stream,framerate=25/1' ! libde265dec ! autovideosink * ]| The above pipeline decodes the HEVC/H.265 bitstream and renders it to the screen. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/lv2/gstlv2.c b/ext/lv2/gstlv2.c index acad44355..2c8253dc8 100644 --- a/ext/lv2/gstlv2.c +++ b/ext/lv2/gstlv2.c @@ -22,6 +22,7 @@ /** * SECTION:element-lv2 + * @title: lv2 * @short_description: bridge for LV2. * * LV2 is a standard for plugins and matching host applications, diff --git a/ext/openal/gstopenalsink.c b/ext/openal/gstopenalsink.c index 66de05113..eb9b9bace 100644 --- a/ext/openal/gstopenalsink.c +++ b/ext/openal/gstopenalsink.c @@ -24,6 +24,7 @@ /** * SECTION:element-openalsink + * @title: openalsink * @see_also: openalsrc * @short_description: capture raw audio samples through OpenAL * @@ -31,8 +32,7 @@ * * Unfortunately the capture API doesn't have a format enumeration/check. all you can do is try opening it and see if it works. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 audiotestsrc ! audioconvert ! volume volume=0.5 ! openalsink * ]| will play a sine wave (continuous beep sound) through OpenAL. @@ -42,7 +42,7 @@ * |[ * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! 
audioconvert ! volume volume=0.25 ! openalsink * ]| will capture and play audio through OpenAL. - * </refsect2> + * */ /* diff --git a/ext/openal/gstopenalsrc.c b/ext/openal/gstopenalsrc.c index 26e73d49b..4f1e26e77 100644 --- a/ext/openal/gstopenalsrc.c +++ b/ext/openal/gstopenalsrc.c @@ -49,20 +49,20 @@ /** * SECTION:element-openalsrc + * @title: openalsrc * @see_also: openalsink * @short_description: capture raw audio samples through OpenAL * * This element captures raw audio samples through OpenAL. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v openalsrc ! audioconvert ! wavenc ! filesink location=stream.wav * ]| * will capture sound through OpenAL and encode it to a wav file. * |[ * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink * ]| will capture and play audio through OpenAL. - * </refsect2> + * */ /* diff --git a/ext/opus/gstopusparse.c b/ext/opus/gstopusparse.c index 56e8bb838..4408af6d3 100644 --- a/ext/opus/gstopusparse.c +++ b/ext/opus/gstopusparse.c @@ -22,16 +22,16 @@ /** * SECTION:element-opusparse + * @title: opusparse * @see_also: opusenc, opusdec * * This element parses OPUS packets. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=opusdata ! opusparse ! opusdec ! audioconvert ! audioresample ! alsasink * ]| Decode and plays an unmuxed Opus file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/resindvd/rsndec.c b/ext/resindvd/rsndec.c index 02d3eb645..4f4b2680f 100644 --- a/ext/resindvd/rsndec.c +++ b/ext/resindvd/rsndec.c @@ -369,7 +369,7 @@ rsn_dec_get_type (void) return type; } -/** Audio decoder subclass */ +/* Audio decoder subclass */ static GstStaticPadTemplate audio_sink_template = GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, @@ -422,7 +422,7 @@ rsn_audiodec_init (RsnAudioDec * self) { } -/** Video decoder subclass */ +/* Video decoder subclass */ static GstStaticPadTemplate video_sink_template = GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, diff --git a/ext/resindvd/rsninputselector.c b/ext/resindvd/rsninputselector.c index 3849baff4..678311603 100644 --- a/ext/resindvd/rsninputselector.c +++ b/ext/resindvd/rsninputselector.c @@ -25,6 +25,7 @@ /** * SECTION:element-input-selector + * @title: input-selector * @see_also: #GstOutputSelector * * Direct one out of N input streams to the output pad. 
@@ -32,21 +33,11 @@ * The input pads are from a GstPad subclass and have additional * properties, which users may find useful, namely: * - * <itemizedlist> - * <listitem> - * "running-time": Running time of stream on pad (#gint64) - * </listitem> - * <listitem> - * "tags": The currently active tags on the pad (#GstTagList, boxed type) - * </listitem> - * <listitem> - * "active": If the pad is currently active (#gboolean) - * </listitem> - * <listitem> - * "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of - * #GST_FLOW_NOT_LINKED - * </listitem> - * </itemizedlist> + * * "running-time": Running time of stream on pad (#gint64) + * * "tags": The currently active tags on the pad (#GstTagList, boxed type) + * * "active": If the pad is currently active (#gboolean) + * * "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of #GST_FLOW_NOT_LINKED + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rsvg/gstrsvgdec.c b/ext/rsvg/gstrsvgdec.c index 5d5fc8ef6..e50a8978d 100644 --- a/ext/rsvg/gstrsvgdec.c +++ b/ext/rsvg/gstrsvgdec.c @@ -19,15 +19,15 @@ */ /** * SECTION:element-rsvgdec + * @title: rsvgdec * * This elements renders SVG graphics. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=image.svg ! rsvgdec ! imagefreeze ! videoconvert ! autovideosink * ]| render and show a svg image. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rsvg/gstrsvgoverlay.c b/ext/rsvg/gstrsvgoverlay.c index b467dc875..5f661b9ad 100644 --- a/ext/rsvg/gstrsvgoverlay.c +++ b/ext/rsvg/gstrsvgoverlay.c @@ -21,6 +21,7 @@ /** * SECTION:element-rsvgoverlay + * @title: rsvgoverlay * * This elements overlays SVG graphics over the video. SVG data can * either be specified through properties, or fed through the @@ -44,8 +45,7 @@ * the values of the x/y/width/height attributes, by setting * height-/width-relative to 1.0. and all other attributes to 0. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay location=foo.svg ! videoconvert ! autovideosink * ]| specifies the SVG location through the filename property. @@ -55,7 +55,7 @@ * |[ * gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay data='<svg viewBox="0 0 800 600"><image x="80%" y="80%" width="10%" height="10%" xlink:href="foo.jpg" /></svg>' ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rtmp/gstrtmpsink.c b/ext/rtmp/gstrtmpsink.c index 1815d9c64..4e07cc692 100644 --- a/ext/rtmp/gstrtmpsink.c +++ b/ext/rtmp/gstrtmpsink.c @@ -20,6 +20,7 @@ /** * SECTION:element-rtmpsink + * @title: rtmpsink * * This element delivers data to a streaming server via RTMP. It uses * librtmp, and supports any protocols/urls that librtmp supports. @@ -27,12 +28,11 @@ * for librtmp, such as 'flashver=version'. See the librtmp documentation * for more detail * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1' * ]| Encode a test video stream to FLV video format and stream it via RTMP. 
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/rtmp/gstrtmpsrc.c b/ext/rtmp/gstrtmpsrc.c index db620cc1f..69cec91a6 100644 --- a/ext/rtmp/gstrtmpsrc.c +++ b/ext/rtmp/gstrtmpsrc.c @@ -26,17 +26,17 @@ /** * SECTION:element-rtmpsrc + * @title: rtmpsrc * * This plugin reads data from a local or remote location specified * by an URI. This location can be specified using any protocol supported by * the RTMP library, i.e. rtmp, rtmpt, rtmps, rtmpe, rtmfp, rtmpte and rtmpts. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v rtmpsrc location=rtmp://somehost/someurl ! fakesink * ]| Open an RTMP location and pass its content to fakesink. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/sbc/gstsbcdec.c b/ext/sbc/gstsbcdec.c index 5019e8326..84b515709 100644 --- a/ext/sbc/gstsbcdec.c +++ b/ext/sbc/gstsbcdec.c @@ -21,15 +21,15 @@ /** * SECTION:element-sbdec + * @title: sbdec * * This element decodes a Bluetooth SBC audio streams to raw integer PCM audio * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=audio.sbc ! sbcparse ! sbcdec ! audioconvert ! audioresample ! autoaudiosink * ]| Decode a raw SBC file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/sbc/gstsbcenc.c b/ext/sbc/gstsbcenc.c index 71aca0753..c25cd32be 100644 --- a/ext/sbc/gstsbcenc.c +++ b/ext/sbc/gstsbcenc.c @@ -20,6 +20,7 @@ /** * SECTION:element-sbenc + * @title: sbenc * * This element encodes raw integer PCM audio into a Bluetooth SBC audio. * @@ -27,12 +28,11 @@ * allocation-mode can be set by adding a capsfilter element with appropriate * filtercaps after the sbcenc encoder element. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v audiotestsrc ! sbcenc ! rtpsbcpay ! udpsink * ]| Encode a sine wave into SBC, RTP payload it and send over the network using UDP - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/smoothstreaming/gstmssdemux.c b/ext/smoothstreaming/gstmssdemux.c index 120d9c22b..0fdea3b4d 100644 --- a/ext/smoothstreaming/gstmssdemux.c +++ b/ext/smoothstreaming/gstmssdemux.c @@ -22,10 +22,10 @@ /** * SECTION:element-mssdemux + * @title: mssdemux * * Demuxes a Microsoft's Smooth Streaming manifest into its audio and/or video streams. * - * */ /* diff --git a/ext/spandsp/gstdtmfdetect.c b/ext/spandsp/gstdtmfdetect.c index 362edb4f3..df7fc7ef1 100644 --- a/ext/spandsp/gstdtmfdetect.c +++ b/ext/spandsp/gstdtmfdetect.c @@ -24,36 +24,21 @@ /** * SECTION:element-dtmfdetect + * @title: dtmfdetect * @short_description: Detects DTMF tones * * This element will detect DTMF tones and emit messages. * - * The message is called <classname>"dtmf-event"</classname> and has - * the following fields: - * <itemizedlist> - * <listitem> - * <para> - * gint <classname>type</classname> (0-1): - * The application uses this field to specify which of the two methods + * The message is called `dtmf-event` and has the following fields: + * + * * gint `type` (0-1): The application uses this field to specify which of the two methods * specified in RFC 2833 to use. The value should be 0 for tones and 1 for * named events. Tones are specified by their frequencies and events are * specfied by their number. This element can only take events as input. * Do not confuse with "method" which specified the output. 
- * </para> - * </listitem> - * <listitem> - * <para> - * gint <classname>number</classname> (0-16): - * The event number. - * </para> - * </listitem> - * <listitem> - * <para> - * gint <classname>method</classname> (2): - * This field will always been 2 (ie sound) from this element. - * </para> - * </listitem> - * </itemizedlist> + * * gint `number` (0-16): The event number. + * * gint `method` (2): This field will always been 2 (ie sound) from this element. + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/spandsp/gstspanplc.c b/ext/spandsp/gstspanplc.c index 06e109046..f4ccf7689 100644 --- a/ext/spandsp/gstspanplc.c +++ b/ext/spandsp/gstspanplc.c @@ -20,6 +20,7 @@ /** * SECTION:element-spanplc + * @title: spanplc * * The spanplc (Packet Loss Concealment) element provides a synthetic * fill-in signal, to minimise the audible effect of lost packets in diff --git a/ext/srtp/gstsrtpdec.c b/ext/srtp/gstsrtpdec.c index 9967dd543..e1be70e0a 100644 --- a/ext/srtp/gstsrtpdec.c +++ b/ext/srtp/gstsrtpdec.c @@ -46,6 +46,7 @@ /** * SECTION:element-srtpdec + * @title: srtpdec * @see_also: srtpenc * * gstrtpdec acts as a decoder that removes security from SRTP and SRTCP @@ -95,8 +96,7 @@ * other means. If no rollover counter is provided by the user, 0 is * used by default. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 udpsrc port=5004 caps='application/x-srtp, payload=(int)8, ssrc=(uint)1356955624, srtp-key=(buffer)012345678901234567890123456789012345678901234567890123456789, srtp-cipher=(string)aes-128-icm, srtp-auth=(string)hmac-sha1-80, srtcp-cipher=(string)aes-128-icm, srtcp-auth=(string)hmac-sha1-80' ! srtpdec ! rtppcmadepay ! alawdec ! pulsesink * ]| Receive PCMA SRTP packets through UDP using caps to specify @@ -105,7 +105,7 @@ * gst-launch-1.0 audiotestsrc ! alawenc ! rtppcmapay ! 'application/x-rtp, payload=(int)8, ssrc=(uint)1356955624' ! srtpenc key="012345678901234567890123456789012345678901234567890123456789" ! udpsink port=5004 * ]| Send PCMA SRTP packets through UDP, nothing how the SSRC is forced so * that the receiver will recognize it. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/srtp/gstsrtpenc.c b/ext/srtp/gstsrtpenc.c index c9ae98070..ff17fd03f 100644 --- a/ext/srtp/gstsrtpenc.c +++ b/ext/srtp/gstsrtpenc.c @@ -45,7 +45,8 @@ */ /** - * SECTION:gst-plugin-bad-plugins-srtpenc + * SECTION:element-srtpenc + * @title: srtpenc * @see_also: srtpdec * * gstrtpenc acts as an encoder that adds security to RTP and RTCP diff --git a/ext/teletextdec/gstteletextdec.c b/ext/teletextdec/gstteletextdec.c index 7f8a8a690..d754e972c 100644 --- a/ext/teletextdec/gstteletextdec.c +++ b/ext/teletextdec/gstteletextdec.c @@ -21,16 +21,16 @@ /** * SECTION:element-teletextdec + * @title: teletextdec * * Decode a stream of raw VBI packets containing teletext information to a RGBA * stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v -m filesrc location=recording.mpeg ! tsdemux ! teletextdec ! videoconvert ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/ttml/gstttmlparse.c b/ext/ttml/gstttmlparse.c index 6de9a1f75..e6f21bee1 100644 --- a/ext/ttml/gstttmlparse.c +++ b/ext/ttml/gstttmlparse.c @@ -22,6 +22,7 @@ /** * SECTION:element-ttmlparse + * @title: ttmlparse * * Parses timed text subtitle files described using Timed Text Markup Language * (TTML). Currently, only the EBU-TT-D profile of TTML, designed for @@ -35,13 +36,12 @@ * elements. 
A downstream renderer element uses this information to correctly * render the text on top of video frames. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink * ]| Parse and render TTML subtitles contained in a single XML file over an * MP4 stream containing H.264 video and AAC audio. - * </refsect2> + * */ #include <stdio.h> diff --git a/ext/ttml/gstttmlrender.c b/ext/ttml/gstttmlrender.c index 2648facb9..ee1cb974d 100644 --- a/ext/ttml/gstttmlrender.c +++ b/ext/ttml/gstttmlrender.c @@ -25,19 +25,19 @@ /** * SECTION:element-ttmlrender + * @title: ttmlrender * * Renders timed text on top of a video stream. It receives text in buffers * from a ttmlparse element; each text string is in its own #GstMemory within * the GstBuffer, and the styling and layout associated with each text string * is in metadata attached to the #GstBuffer. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink * ]| Parse and render TTML subtitles contained in a single XML file over an * MP4 stream containing H.264 video and AAC audio: - * </refsect2> + * */ #include <gst/video/video.h> diff --git a/ext/ttml/subtitle.c b/ext/ttml/subtitle.c index b8843a379..e0c32faa1 100644 --- a/ext/ttml/subtitle.c +++ b/ext/ttml/subtitle.c @@ -20,6 +20,7 @@ /** * SECTION:gstsubtitle + * @title: GstSubtitle * @short_description: Library for describing sets of static subtitles. * * This library enables the description of static text scenes made up of a diff --git a/ext/ttml/subtitlemeta.c b/ext/ttml/subtitlemeta.c index 69da5f58b..cdbfcce24 100644 --- a/ext/ttml/subtitlemeta.c +++ b/ext/ttml/subtitlemeta.c @@ -20,6 +20,7 @@ /** * SECTION:gstsubtitlemeta + * @title: GstSubtitleMeta * @short_description: Metadata class for timed-text subtitles. * * The GstSubtitleMeta class enables the layout and styling information needed diff --git a/ext/voaacenc/gstvoaacenc.c b/ext/voaacenc/gstvoaacenc.c index 9c48ea205..0580d27f0 100644 --- a/ext/voaacenc/gstvoaacenc.c +++ b/ext/voaacenc/gstvoaacenc.c @@ -19,16 +19,16 @@ /** * SECTION:element-voaacenc + * @title: voaacenc * - * AAC audio encoder based on vo-aacenc library + * AAC audio encoder based on vo-aacenc library * <ulink url="http://sourceforge.net/projects/opencore-amr/files/vo-aacenc/">vo-aacenc library source file</ulink>. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voaacenc ! 
filesink location=abc.aac * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/voamrwbenc/gstvoamrwbenc.c b/ext/voamrwbenc/gstvoamrwbenc.c index f84e576a1..c5eae31d7 100644 --- a/ext/voamrwbenc/gstvoamrwbenc.c +++ b/ext/voamrwbenc/gstvoamrwbenc.c @@ -19,19 +19,19 @@ /** * SECTION:element-voamrwbenc + * @title: voamrwbenc * @see_also: #GstAmrWbDec, #GstAmrWbParse * - * AMR wideband encoder based on the + * AMR wideband encoder based on the * <ulink url="http://www.penguin.cz/~utx/amr">reference codec implementation</ulink>. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voamrwbenc ! filesink location=abc.amr * ]| * Please note that the above stream misses the header, that is needed to play * the stream. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/vulkan/gstvulkan.c b/ext/vulkan/gstvulkan.c index 7158895a2..d553e8760 100644 --- a/ext/vulkan/gstvulkan.c +++ b/ext/vulkan/gstvulkan.c @@ -20,6 +20,7 @@ /** * SECTION:plugin-vulkan + * @title: vulkan * * Cross-platform Vulkan plugin. */ diff --git a/ext/vulkan/vkbuffermemory.c b/ext/vulkan/vkbuffermemory.c index e54d15930..d3b199e74 100644 --- a/ext/vulkan/vkbuffermemory.c +++ b/ext/vulkan/vkbuffermemory.c @@ -26,6 +26,7 @@ /** * SECTION:vkbuffermemory + * @title: vkbuffermemory * @short_description: memory subclass for Vulkan buffer memory * @see_also: #GstMemory, #GstAllocator * @@ -420,7 +421,7 @@ gst_vulkan_buffer_memory_init_once (void) /** * gst_is_vulkan_buffer_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstVulkanBufferMemory */ gboolean diff --git a/ext/vulkan/vkbufferpool.c b/ext/vulkan/vkbufferpool.c index 01fb2fba8..df64358e1 100644 --- a/ext/vulkan/vkbufferpool.c +++ b/ext/vulkan/vkbufferpool.c @@ -26,6 +26,7 @@ /** * SECTION:vkbufferpool + * @title: GstVulkanBufferPool * @short_description: buffer pool for #GstVulkanBufferMemory objects * @see_also: #GstBufferPool, #GstVulkanBufferMemory * @@ -33,7 +34,7 @@ * * A #GstVulkanBufferPool is created with gst_vulkan_buffer_pool_new() * - * #GstVulkanBufferPool implements the VideoMeta buffer pool option + * #GstVulkanBufferPool implements the VideoMeta buffer pool option * #GST_BUFFER_POOL_OPTION_VIDEO_META */ diff --git a/ext/vulkan/vkimagememory.c b/ext/vulkan/vkimagememory.c index 074cf9ac0..025443416 100644 --- a/ext/vulkan/vkimagememory.c +++ b/ext/vulkan/vkimagememory.c @@ -26,6 +26,7 @@ /** * SECTION:vkimagememory + * @title: GstVkImageMemory * @short_description: memory subclass for Vulkan image memory * @see_also: #GstMemory, #GstAllocator * @@ -559,7 +560,7 @@ gst_vulkan_image_memory_init_once (void) /** * gst_is_vulkan_image_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstVulkanImageMemory */ gboolean diff --git a/ext/vulkan/vkmemory.c b/ext/vulkan/vkmemory.c index 52855c13f..c21c34daa 100644 --- a/ext/vulkan/vkmemory.c +++ b/ext/vulkan/vkmemory.c @@ -28,11 +28,12 @@ /** * SECTION:vkmemory + * @title: GstVkMemory * @short_description: memory subclass for Vulkan device memory * @see_also: #GstMemory, #GstAllocator * * GstVulkanMemory is a #GstMemory subclass providing support for the mapping of - * Vulkan device memory. + * Vulkan device memory. 
*/ /* WARNING: while suballocation is allowed, nothing prevents aliasing which @@ -347,7 +348,7 @@ gst_vulkan_memory_init_once (void) /** * gst_is_vulkan_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstVulkanMemory */ gboolean diff --git a/ext/vulkan/vksink.c b/ext/vulkan/vksink.c index 586d2861d..c9893deeb 100644 --- a/ext/vulkan/vksink.c +++ b/ext/vulkan/vksink.c @@ -20,6 +20,7 @@ /** * SECTION:element-vulkansink + * @title: vulkansink * * vulkansink renders video frames to a drawable on a local or remote * display using Vulkan. diff --git a/ext/vulkan/vkupload.c b/ext/vulkan/vkupload.c index 4988c640a..8839630e9 100644 --- a/ext/vulkan/vkupload.c +++ b/ext/vulkan/vkupload.c @@ -20,6 +20,7 @@ /** * SECTION:element-vulkanupload + * @title: vulkanupload * * vulkanupload uploads data into Vulkan memory objects. */ diff --git a/ext/vulkan/vkwindow.c b/ext/vulkan/vkwindow.c index ab8c45c5f..6846e3156 100644 --- a/ext/vulkan/vkwindow.c +++ b/ext/vulkan/vkwindow.c @@ -19,7 +19,7 @@ */ /** - * SECTION:gstglwindow + * SECTION:vkwindow * @short_description: window/surface abstraction * @title: GstVulkanWindow * @see_also: #GstGLContext, #GstGLDisplay diff --git a/ext/wayland/gstwaylandsink.c b/ext/wayland/gstwaylandsink.c index 0d974196d..46a710fd0 100644 --- a/ext/wayland/gstwaylandsink.c +++ b/ext/wayland/gstwaylandsink.c @@ -23,18 +23,18 @@ /** * SECTION:element-waylandsink + * @title: waylandsink * * The waylandsink is creating its own window and render the decoded video frames to that. * Setup the Wayland environment as described in * <ulink url="http://wayland.freedesktop.org/building.html">Wayland</ulink> home page. * The current implementaion is based on weston compositor. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v videotestsrc ! waylandsink * ]| test the video rendering in wayland - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/wildmidi/gstwildmidi.c b/ext/wildmidi/gstwildmidi.c index 7f380d6da..89bdcebb3 100644 --- a/ext/wildmidi/gstwildmidi.c +++ b/ext/wildmidi/gstwildmidi.c @@ -21,6 +21,7 @@ /** * SECTION:element-wildmidi + * @title: wildmidi * @see_also: timidity * * This element renders midi-files as audio streams using @@ -29,13 +30,12 @@ * uses the same sound-patches as timidity (it tries the path in $WILDMIDI_CFG, * $HOME/.wildmidirc and /etc/wildmidi.cfg) * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 filesrc location=song.mid ! wildmidi ! alsasink * ]| This example pipeline will parse the midi and render to raw audio which is * played via alsa. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/ext/x265/gstx265enc.c b/ext/x265/gstx265enc.c index b48ed0b07..2f22a4d21 100644 --- a/ext/x265/gstx265enc.c +++ b/ext/x265/gstx265enc.c @@ -22,6 +22,7 @@ /** * SECTION:element-x265enc + * @title: x265enc * * This element encodes raw video into H265 compressed data. * diff --git a/ext/zbar/gstzbar.c b/ext/zbar/gstzbar.c index e84f48c2f..18149eb5e 100644 --- a/ext/zbar/gstzbar.c +++ b/ext/zbar/gstzbar.c @@ -19,64 +19,31 @@ /** * SECTION:element-zbar + * @title: zbar * * Detect bar codes in the video streams and send them as element messages to * the #GstBus if .#GstZBar:message property is %TRUE. * If the .#GstZBar:attach-frame property is %TRUE, the posted barcode message * includes a sample of the frame where the barcode was detected (Since 1.6). * - * The element generate messages named - * <classname>"barcode"</classname>. 
The structure containes these - * fields: - * <itemizedlist> - * <listitem> - * <para> - * #GstClockTime - * <classname>"timestamp"</classname>: - * the timestamp of the buffer that triggered the message. - * </para> - * </listitem> - * <listitem> - * <para> - * gchar* - * <classname>"type"</classname>: - * the symbol type. - * </para> - * </listitem> - * <listitem> - * <para> - * gchar* - * <classname>"symbol"</classname>: - * the deteted bar code data. - * </para> - * </listitem> - * <listitem> - * <para> - * gint - * <classname>"quality"</classname>: - * an unscaled, relative quantity: larger values are better than smaller + * The element generate messages named`barcode`. The structure containes these fields: + * + * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message. + * * gchar * `type`: the symbol type. + * * gchar * `symbol`: the deteted bar code data. + * * gint `quality`: an unscaled, relative quantity: larger values are better than smaller * values. - * </para> - * </listitem> - * <listitem> - * <para> - * GstSample - * <classname>"frame"</classname>: - * the frame in which the barcode message was detected, if + * * GstSample `frame`: the frame in which the barcode message was detected, if * the .#GstZBar:attach-frame property was set to %TRUE (Since 1.6) - * </para> - * </listitem> - * </itemizedlist> * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -m v4l2src ! videoconvert ! zbar ! videoconvert ! xvimagesink * ]| This pipeline will detect barcodes and send them as messages. * |[ * gst-launch-1.0 -m v4l2src ! tee name=t ! queue ! videoconvert ! zbar ! fakesink t. ! queue ! xvimagesink * ]| Same as above, but running the filter on a branch to keep the display in color - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst-libs/gst/base/gstaggregator.c b/gst-libs/gst/base/gstaggregator.c index d693541ea..d0ec234c8 100644 --- a/gst-libs/gst/base/gstaggregator.c +++ b/gst-libs/gst/base/gstaggregator.c @@ -21,46 +21,40 @@ */ /** * SECTION: gstaggregator + * @title: GstAggregator * @short_description: manages a set of pads with the purpose of * aggregating their buffers. * @see_also: gstcollectpads for historical reasons. * * Manages a set of pads with the purpose of aggregating their buffers. * Control is given to the subclass when all pads have data. - * <itemizedlist> - * <listitem><para> - * Base class for mixers and muxers. Subclasses should at least implement + * + * * Base class for mixers and muxers. Subclasses should at least implement * the #GstAggregatorClass.aggregate() virtual method. - * </para></listitem> - * <listitem><para> - * When data is queued on all pads, tha aggregate vmethod is called. - * </para></listitem> - * <listitem><para> - * One can peek at the data on any given GstAggregatorPad with the + * + * * When data is queued on all pads, tha aggregate vmethod is called. + * + * * One can peek at the data on any given GstAggregatorPad with the * gst_aggregator_pad_get_buffer () method, and take ownership of it * with the gst_aggregator_pad_steal_buffer () method. When a buffer * has been taken with steal_buffer (), a new buffer can be queued * on that pad. - * </para></listitem> - * <listitem><para> - * If the subclass wishes to push a buffer downstream in its aggregate + * + * * If the subclass wishes to push a buffer downstream in its aggregate * implementation, it should do so through the * gst_aggregator_finish_buffer () method. 
This method will take care * of sending and ordering mandatory events such as stream start, caps * and segment. - * </para></listitem> - * <listitem><para> - * Same goes for EOS events, which should not be pushed directly by the + * + * * Same goes for EOS events, which should not be pushed directly by the * subclass, it should instead return GST_FLOW_EOS in its aggregate * implementation. - * </para></listitem> - * <listitem><para> - * Note that the aggregator logic regarding gap event handling is to turn + * + * * Note that the aggregator logic regarding gap event handling is to turn * these into gap buffers with matching PTS and duration. It will also * flag these buffers with GST_BUFFER_FLAG_GAP and GST_BUFFER_FLAG_DROPPABLE * to ease their identification and subsequent processing. - * </para></listitem> - * </itemizedlist> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst-libs/gst/codecparsers/gsth264parser.c b/gst-libs/gst/codecparsers/gsth264parser.c index f3485ae33..86c5e44cc 100644 --- a/gst-libs/gst/codecparsers/gsth264parser.c +++ b/gst-libs/gst/codecparsers/gsth264parser.c @@ -29,40 +29,30 @@ /** * SECTION:gsth264parser + * @title: GstH264Parser * @short_description: Convenience library for h264 video * bitstream parsing. * * It offers bitstream parsing in both AVC (length-prefixed) and Annex B * (0x000001 start code prefix) format. To identify a NAL unit in a bitstream * and parse its headers, first call: - * <itemizedlist> - * <listitem> - * #gst_h264_parser_identify_nalu to identify a NAL unit in an Annex B type bitstream - * </listitem> - * <listitem> - * #gst_h264_parser_identify_nalu_avc to identify a NAL unit in an AVC type bitstream - * </listitem> - * </itemizedlist> + * + * * #gst_h264_parser_identify_nalu to identify a NAL unit in an Annex B type bitstream + * + * * #gst_h264_parser_identify_nalu_avc to identify a NAL unit in an AVC type bitstream * * The following functions are then available for parsing the structure of the * #GstH264NalUnit, depending on the #GstH264NalUnitType: - * <itemizedlist> - * <listitem> - * From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr - * </listitem> - * <listitem> - * #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei - * </listitem> - * <listitem> - * #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps - * </listitem> - * <listitem> - * #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps - * </listitem> - * <listitem> - * Any other: #gst_h264_parser_parse_nal - * </listitem> - * </itemizedlist> + * + * * From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr + * + * * #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei + * + * * #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps + * + * * #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps + * + * * Any other: #gst_h264_parser_parse_nal * * One of these functions *must* be called on every NAL unit in the bitstream, * in order to keep the internal structures of the #GstH264NalParser up to @@ -70,17 +60,13 @@ * type, if no special parsing of the current NAL unit is required by the * application. 
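The usage contract spelled out above boils down to a small identify/parse loop. A minimal sketch of that pattern (not part of this patch; Annex B input and a caller-supplied `data`/`size` buffer are assumed):

|[
/* Walk an Annex B chunk with GstH264NalParser, keeping the parser state
 * up to date by calling gst_h264_parser_parse_nal() on every NAL unit. */
#include <gst/codecparsers/gsth264parser.h>

static void
scan_annex_b (const guint8 * data, gsize size)
{
  GstH264NalParser *parser = gst_h264_nal_parser_new ();
  GstH264NalUnit nalu;
  GstH264ParserResult res;
  guint offset = 0;

  while ((res = gst_h264_parser_identify_nalu (parser, data, offset, size,
              &nalu)) == GST_H264_PARSER_OK
      || res == GST_H264_PARSER_NO_NAL_END) {
    gst_h264_parser_parse_nal (parser, &nalu);
    g_print ("NAL type %d, %u bytes\n", nalu.type, nalu.size);

    if (res == GST_H264_PARSER_NO_NAL_END)
      break;                    /* last NAL unit, no terminating start code */
    offset = nalu.offset + nalu.size;
  }

  gst_h264_nal_parser_free (parser);
}
]|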
* - * For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 – MPEG-4 + * For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 – MPEG-4 * Part 10 specifications, available at: * - * <itemizedlist> - * <listitem> - * ITU-T H.264: http://www.itu.int/rec/T-REC-H.264 - * </listitem> - * <listitem> - * ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538 - * </listitem> - * </itemizedlist> + * * ITU-T H.264: http://www.itu.int/rec/T-REC-H.264 + * + * * ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538 + * */ #ifdef HAVE_CONFIG_H diff --git a/gst-libs/gst/codecparsers/gsth265parser.c b/gst-libs/gst/codecparsers/gsth265parser.c index 52a1fc2a7..fd56f15d0 100644 --- a/gst-libs/gst/codecparsers/gsth265parser.c +++ b/gst-libs/gst/codecparsers/gsth265parser.c @@ -22,43 +22,32 @@ /** * SECTION:gsth265parser + * @title: GstH265Parser * @short_description: Convenience library for h265 video bitstream parsing. * * It offers you bitstream parsing in HEVC mode and non-HEVC mode. To identify * Nals in a bitstream and parse its headers, you should call: - * <itemizedlist> - * <listitem> - * gst_h265_parser_identify_nalu() to identify the following nalu in + * + * * gst_h265_parser_identify_nalu() to identify the following nalu in * non-HEVC bitstreams - * </listitem> - * <listitem> - * gst_h265_parser_identify_nalu_hevc() to identify the nalu in + * + * * gst_h265_parser_identify_nalu_hevc() to identify the nalu in * HEVC bitstreams - * </listitem> - * </itemizedlist> * * Then, depending on the #GstH265NalUnitType of the newly parsed #GstH265NalUnit, * you should call the differents functions to parse the structure: - * <itemizedlist> - * <listitem> - * From #GST_H265_NAL_SLICE_TRAIL_N to #GST_H265_NAL_SLICE_CRA_NUT: gst_h265_parser_parse_slice_hdr() - * </listitem> - * <listitem> - * #GST_H265_NAL_SEI: gst_h265_parser_parse_sei() - * </listitem> - * <listitem> - * #GST_H265_NAL_VPS: gst_h265_parser_parse_vps() - * </listitem> - * <listitem> - * #GST_H265_NAL_SPS: gst_h265_parser_parse_sps() - * </listitem> - * <listitem> - * #GST_H265_NAL_PPS: #gst_h265_parser_parse_pps() - * </listitem> - * <listitem> - * Any other: gst_h265_parser_parse_nal() - * </listitem> - * </itemizedlist> + * + * * From #GST_H265_NAL_SLICE_TRAIL_N to #GST_H265_NAL_SLICE_CRA_NUT: gst_h265_parser_parse_slice_hdr() + * + * * #GST_H265_NAL_SEI: gst_h265_parser_parse_sei() + * + * * #GST_H265_NAL_VPS: gst_h265_parser_parse_vps() + * + * * #GST_H265_NAL_SPS: gst_h265_parser_parse_sps() + * + * * #GST_H265_NAL_PPS: #gst_h265_parser_parse_pps() + * + * * Any other: gst_h265_parser_parse_nal() * * Note: You should always call gst_h265_parser_parse_nal() if you don't * actually need #GstH265NalUnitType to be parsed for your personal use, in @@ -67,11 +56,8 @@ * For more details about the structures, look at the ITU-T H.265 * specifications, you can download them from: * - * <itemizedlist> - * <listitem> - * ITU-T H.265: http://www.itu.int/rec/T-REC-H.265 - * </listitem> - * </itemizedlist> + * * ITU-T H.265: http://www.itu.int/rec/T-REC-H.265 + * */ #ifdef HAVE_CONFIG_H diff --git a/gst-libs/gst/codecparsers/gstjpeg2000sampling.c b/gst-libs/gst/codecparsers/gstjpeg2000sampling.c index dba1c50fd..ab8321f20 100644 --- a/gst-libs/gst/codecparsers/gstjpeg2000sampling.c +++ b/gst-libs/gst/codecparsers/gstjpeg2000sampling.c @@ -20,6 +20,7 @@ /** * SECTION:gstjpeg2000sampling + * @title: 
GstJpeg2000Sampling * @short_description: Manage JPEG 2000 sampling and colorspace fields * */ diff --git a/gst-libs/gst/codecparsers/gstjpegparser.c b/gst-libs/gst/codecparsers/gstjpegparser.c index d927854e8..bbc1f181e 100644 --- a/gst-libs/gst/codecparsers/gstjpegparser.c +++ b/gst-libs/gst/codecparsers/gstjpegparser.c @@ -20,13 +20,11 @@ /** * SECTION:gstjpegparser + * @title: GstJpegParser * @short_description: Convenience library for JPEG bitstream parsing. * - * <refsect2> - * <para> * Provides useful functions for parsing JPEG images - * </para> - * </refsect2> + * */ #include <string.h> diff --git a/gst-libs/gst/codecparsers/gstmpeg4parser.c b/gst-libs/gst/codecparsers/gstmpeg4parser.c index b4557647b..977d98590 100644 --- a/gst-libs/gst/codecparsers/gstmpeg4parser.c +++ b/gst-libs/gst/codecparsers/gstmpeg4parser.c @@ -20,6 +20,7 @@ */ /** * SECTION:gstmpeg4parser + * @title: GstMpeg4Parser * @short_description: Convenience library for parsing mpeg4 part 2 video * bitstream. * diff --git a/gst-libs/gst/codecparsers/gstmpegvideoparser.c b/gst-libs/gst/codecparsers/gstmpegvideoparser.c index 2cfd32acc..6f1b49419 100644 --- a/gst-libs/gst/codecparsers/gstmpegvideoparser.c +++ b/gst-libs/gst/codecparsers/gstmpegvideoparser.c @@ -25,14 +25,12 @@ /** * SECTION:gstmpegvideoparser + * @title: GstMpegvideoParser * @short_description: Convenience library for mpeg1 and 2 video * bitstream parsing. * - * <refsect2> - * <para> * Provides useful functions for mpeg videos bitstream parsing. - * </para> - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst-libs/gst/codecparsers/gstvc1parser.c b/gst-libs/gst/codecparsers/gstvc1parser.c index 7ed73f846..ac579712d 100644 --- a/gst-libs/gst/codecparsers/gstvc1parser.c +++ b/gst-libs/gst/codecparsers/gstvc1parser.c @@ -20,6 +20,7 @@ */ /** * SECTION:gstvc1parser + * @title: GstVc1Parser * @short_description: Convenience library for parsing vc1 video * bitstream. * diff --git a/gst-libs/gst/codecparsers/gstvp8parser.c b/gst-libs/gst/codecparsers/gstvp8parser.c index 8de819341..7a13a89da 100644 --- a/gst-libs/gst/codecparsers/gstvp8parser.c +++ b/gst-libs/gst/codecparsers/gstvp8parser.c @@ -23,6 +23,7 @@ /** * SECTION:gstvp8parser + * @title: GstVp8Parser * @short_description: Convenience library for parsing vp8 video bitstream. * * For more details about the structures, you can refer to the diff --git a/gst-libs/gst/codecparsers/gstvp8parser.h b/gst-libs/gst/codecparsers/gstvp8parser.h index efe795060..43dcd12c2 100644 --- a/gst-libs/gst/codecparsers/gstvp8parser.h +++ b/gst-libs/gst/codecparsers/gstvp8parser.h @@ -72,7 +72,7 @@ typedef enum { * index * * Dequantization indices. - */ + */ struct _GstVp8QuantIndices { guint8 y_ac_qi; diff --git a/gst-libs/gst/codecparsers/gstvp9parser.c b/gst-libs/gst/codecparsers/gstvp9parser.c index 0bd05bca4..66e40db5c 100644 --- a/gst-libs/gst/codecparsers/gstvp9parser.c +++ b/gst-libs/gst/codecparsers/gstvp9parser.c @@ -22,6 +22,7 @@ */ /** * SECTION:gstvp9parser + * @title: GstVp9Parser * @short_description: Convenience library for parsing vp9 video bitstream. 
* * For more details about the structures, you can refer to the diff --git a/gst-libs/gst/gl/gstglapi.c b/gst-libs/gst/gl/gstglapi.c index 5ae9344e1..8b2fea505 100644 --- a/gst-libs/gst/gl/gstglapi.c +++ b/gst-libs/gst/gl/gstglapi.c @@ -20,6 +20,7 @@ /** * SECTION:gstglapi + * @title: GstGlApi * @short_description: OpenGL API specific functionality * @see_also: #GstGLDisplay, #GstGLContext * diff --git a/gst-libs/gst/gl/gstglbasememory.c b/gst-libs/gst/gl/gstglbasememory.c index 0a01dfeaf..69b1b8838 100644 --- a/gst-libs/gst/gl/gstglbasememory.c +++ b/gst-libs/gst/gl/gstglbasememory.c @@ -28,6 +28,7 @@ /** * SECTION:gstglbasememory + * @title: GstGlBaseMemory * @short_description: memory subclass for GL buffers * @see_also: #GstMemory, #GstAllocator * @@ -523,7 +524,7 @@ gst_gl_base_memory_allocator_init (GstGLBaseMemoryAllocator * allocator) /** * gst_is_gl_base_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstGLBaseMemory * * Since: 1.8 diff --git a/gst-libs/gst/gl/gstglbuffer.c b/gst-libs/gst/gl/gstglbuffer.c index ba67e2d62..73c4b210a 100644 --- a/gst-libs/gst/gl/gstglbuffer.c +++ b/gst-libs/gst/gl/gstglbuffer.c @@ -29,11 +29,12 @@ /** * SECTION:gstglbuffer + * @title: GstGlBuffer * @short_description: memory subclass for GL buffers * @see_also: #GstMemory, #GstAllocator * * GstGLBuffer is a #GstMemory subclass providing support for the mapping of - * GL buffers. + * GL buffers. * * Data is uploaded or downloaded from the GPU as is necessary. */ @@ -473,7 +474,7 @@ gst_gl_buffer_init_once (void) /** * gst_is_gl_buffer: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstGLBuffer * * Since: 1.8 diff --git a/gst-libs/gst/gl/gstglbufferpool.c b/gst-libs/gst/gl/gstglbufferpool.c index a12f750e1..e886557ac 100644 --- a/gst-libs/gst/gl/gstglbufferpool.c +++ b/gst-libs/gst/gl/gstglbufferpool.c @@ -28,6 +28,7 @@ /** * SECTION:gstglbufferpool + * @title: GstGlBufferPool * @short_description: buffer pool for #GstGLBaseMemory objects * @see_also: #GstBufferPool, #GstGLBaseMemory, #GstGLMemory * @@ -35,7 +36,7 @@ * * A #GstGLBufferPool is created with gst_gl_buffer_pool_new() * - * #GstGLBufferPool implements the VideoMeta buffer pool option + * #GstGLBufferPool implements the VideoMeta buffer pool option * %GST_BUFFER_POOL_OPTION_VIDEO_META, the VideoAligment buffer pool option * %GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT as well as the OpenGL specific * %GST_BUFFER_POOL_OPTION_GL_SYNC_META buffer pool option. 
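Since the GstGLBufferPool section above names the buffer-pool options the pool implements, a short hedged sketch of how such a pool is typically configured may help; it assumes an already-created GstGLContext and negotiated raw-video caps, and the helper name is made up for illustration:

|[
/* Configure a GstGLBufferPool with the VideoMeta option mentioned above.
 * The GL library still required GST_USE_UNSTABLE_API at the time of this
 * commit, hence the define. */
#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#include <gst/video/video.h>

static GstBufferPool *
make_gl_pool (GstGLContext * context, GstCaps * caps)
{
  GstVideoInfo info;
  GstBufferPool *pool;
  GstStructure *config;

  if (!gst_video_info_from_caps (&info, caps))
    return NULL;

  pool = gst_gl_buffer_pool_new (context);
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, (guint) info.size, 2, 0);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  if (!gst_buffer_pool_set_config (pool, config)) {
    gst_object_unref (pool);
    return NULL;
  }
  return pool;
}
]|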
diff --git a/gst-libs/gst/gl/gstglcolorconvert.c b/gst-libs/gst/gl/gstglcolorconvert.c index 4373be5d3..89edb164c 100644 --- a/gst-libs/gst/gl/gstglcolorconvert.c +++ b/gst-libs/gst/gl/gstglcolorconvert.c @@ -31,6 +31,7 @@ /** * SECTION:gstglcolorconvert + * @title: GstGlColorConvert * @short_description: convert between video color spaces and formats * @see_also: #GstGLUpload, #GstGLMemory, #GstGLBaseMemory * @@ -1231,7 +1232,7 @@ gst_gl_color_convert_fixate_caps (GstGLContext * context, * @inbuf: (transfer none): the #GstGLMemory filled #GstBuffer to convert * * Converts the data contained by @inbuf using the formats specified by the - * #GstCaps passed to gst_gl_color_convert_set_caps() + * #GstCaps passed to gst_gl_color_convert_set_caps() * * Returns: (transfer full): a converted #GstBuffer or %NULL * diff --git a/gst-libs/gst/gl/gstglcontext.c b/gst-libs/gst/gl/gstglcontext.c index 792aac726..9af5e1cc8 100644 --- a/gst-libs/gst/gl/gstglcontext.c +++ b/gst-libs/gst/gl/gstglcontext.c @@ -376,7 +376,7 @@ gst_gl_context_new (GstGLDisplay * display) * @context_type: a #GstGLPlatform specifying the type of context in @handle * @available_apis: a #GstGLAPI containing the available OpenGL apis in @handle * - * Wraps an existing OpenGL context into a #GstGLContext. + * Wraps an existing OpenGL context into a #GstGLContext. * * Note: The caller is responsible for ensuring that the OpenGL context * represented by @handle stays alive while the returned #GstGLContext is diff --git a/gst-libs/gst/gl/gstgldisplay.c b/gst-libs/gst/gl/gstgldisplay.c index 54a135d92..b456b1fa4 100644 --- a/gst-libs/gst/gl/gstgldisplay.c +++ b/gst-libs/gst/gl/gstgldisplay.c @@ -27,7 +27,7 @@ * @title: GstGLDisplay * @see_also: #GstContext, #GstGLContext, #GstGLWindow * - * #GstGLDisplay represents a connection to the underlying windowing system. + * #GstGLDisplay represents a connection to the underlying windowing system. * Elements are required to make use of #GstContext to share and propogate * a #GstGLDisplay. * @@ -40,13 +40,13 @@ * - GST_GL_API influences the OpenGL API requested by the OpenGL platform. * Common values are 'opengl' and 'gles2'. * - * <note>Certain window systems require a special function to be called to - * initialize threading support. As this GStreamer GL library does not preclude - * concurrent access to the windowing system, it is strongly advised that - * applications ensure that threading support has been initialized before any - * other toolkit/library functionality is accessed. Failure to do so could - * result in sudden application abortion during execution. The most notably - * example of such a function is X11's XInitThreads().</note> + * > Certain window systems require a special function to be called to + * > initialize threading support. As this GStreamer GL library does not preclude + * > concurrent access to the windowing system, it is strongly advised that + * > applications ensure that threading support has been initialized before any + * > other toolkit/library functionality is accessed. Failure to do so could + * > result in sudden application abortion during execution. The most notably + * > example of such a function is X11's XInitThreads\(). 
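The threading note ported just above is easiest to satisfy at program start; a minimal illustration for X11 (not part of the patch):

|[
/* Initialize Xlib threading before GStreamer or any toolkit touches X11,
 * as the GstGLDisplay documentation above advises. */
#include <X11/Xlib.h>
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  XInitThreads ();              /* must precede every other Xlib call */
  gst_init (&argc, &argv);

  /* ... build and run a pipeline that uses OpenGL elements ... */

  return 0;
}
]|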
*/ #ifdef HAVE_CONFIG_H diff --git a/gst-libs/gst/gl/gstglfilter.c b/gst-libs/gst/gl/gstglfilter.c index 61cce0286..e702a1ecf 100644 --- a/gst-libs/gst/gl/gstglfilter.c +++ b/gst-libs/gst/gl/gstglfilter.c @@ -22,6 +22,7 @@ /** * SECTION:gstglfilter + * @title: GstGlFilter * @short_description: GstBaseTransform subclass for dealing with RGBA textures * @see_also: #GstBaseTransform, #GstGLContext, #GstGLFramebuffer * @@ -1174,7 +1175,7 @@ _unbind_buffer (GstGLFilter * filter) * gst_gl_filter_draw_fullscreen_quad: * @filter: a #GstGLFilter * - * Render a fullscreen quad using the current GL state. The only GL state this + * Render a fullscreen quad using the current GL state. The only GL state this * modifies is the necessary vertex/index buffers and, if necessary, a * Vertex Array Object for drawing a fullscreen quad. Framebuffer state, * any shaders, viewport state, etc must be setup by the caller. diff --git a/gst-libs/gst/gl/gstglformat.c b/gst-libs/gst/gl/gstglformat.c index db66f7f83..36aecf2e3 100644 --- a/gst-libs/gst/gl/gstglformat.c +++ b/gst-libs/gst/gl/gstglformat.c @@ -20,6 +20,7 @@ /** * SECTION:gstglformat + * @title: GstGlFormat * @short_description: utilities for dealing with OpenGL formats * @see_also: #GstGLBaseMemory, #GstGLMemory, #GstGLFramebuffer, #GstGLBuffer * diff --git a/gst-libs/gst/gl/gstglmemory.c b/gst-libs/gst/gl/gstglmemory.c index 25f67d7dc..5e6932146 100644 --- a/gst-libs/gst/gl/gstglmemory.c +++ b/gst-libs/gst/gl/gstglmemory.c @@ -31,11 +31,12 @@ /** * SECTION:gstglmemory + * @title: GstGlMemory * @short_description: memory subclass for GL textures * @see_also: #GstMemory, #GstAllocator, #GstGLBufferPool * * GstGLMemory is a #GstGLBaseMemory subclass providing support for the mapping of - * OpenGL textures. + * OpenGL textures. * * #GstGLMemory is created or wrapped through gst_gl_base_memory_alloc() * with #GstGLVideoAllocationParams. @@ -1092,7 +1093,7 @@ gst_gl_memory_init_once (void) /** * gst_is_gl_memory: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstGLMemory * * Since: 1.4 diff --git a/gst-libs/gst/gl/gstglmemorypbo.c b/gst-libs/gst/gl/gstglmemorypbo.c index 7ccef2772..4ae7125a3 100644 --- a/gst-libs/gst/gl/gstglmemorypbo.c +++ b/gst-libs/gst/gl/gstglmemorypbo.c @@ -31,6 +31,7 @@ /** * SECTION:gstglmemorypbo + * @title: GstGLMemoryPBO * @short_description: memory subclass for GL textures * @see_also: #GstMemory, #GstAllocator, #GstGLBufferPool * @@ -846,7 +847,7 @@ gst_gl_memory_pbo_init_once (void) /** * gst_is_gl_memory_pbo: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstGLMemoryPBO * * Since: 1.8 diff --git a/gst-libs/gst/gl/gstgloverlaycompositor.c b/gst-libs/gst/gl/gstgloverlaycompositor.c index 354c1efa5..16c123cd0 100644 --- a/gst-libs/gst/gl/gstgloverlaycompositor.c +++ b/gst-libs/gst/gl/gstgloverlaycompositor.c @@ -20,6 +20,7 @@ /** * SECTION:gstgloverlaycompositor + * @title: GstGLOverlayCompositor * @short_description: Composite multiple overlays using OpenGL * @see_also: #GstGLMemory, #GstGLContext */ diff --git a/gst-libs/gst/gl/gstglquery.c b/gst-libs/gst/gl/gstglquery.c index bae5b8cb0..0cf0f75ad 100644 --- a/gst-libs/gst/gl/gstglquery.c +++ b/gst-libs/gst/gl/gstglquery.c @@ -22,7 +22,7 @@ * SECTION:gstglquery * @short_description: OpenGL query abstraction * @title: GstGLQuery - * @see_also: + * @see_also: * * A #GstGLQuery represents and holds an OpenGL query object. Various types of * queries can be run or counters retrieved. 
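The GstGLQuery section above only states that queries can be run or counters retrieved; a hedged sketch of the time-elapsed case may be useful. It assumes the code runs in the GL thread of an existing context (for instance from a gst_gl_context_thread_add() callback), and the function name is illustrative only:

|[
/* Time a stretch of GL work with a GstGLQuery. */
#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>

static void
time_gl_work (GstGLContext * context)
{
  GstGLQuery *query = gst_gl_query_new (context, GST_GL_QUERY_TIME_ELAPSED);

  gst_gl_query_start (query);
  /* ... issue the GL calls to be measured ... */
  gst_gl_query_end (query);

  g_print ("GL work took %" G_GUINT64_FORMAT " ns\n",
      gst_gl_query_result (query));

  gst_gl_query_free (query);
}
]|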
diff --git a/gst-libs/gst/gl/gstglrenderbuffer.c b/gst-libs/gst/gl/gstglrenderbuffer.c index 154ec0049..72f95f853 100644 --- a/gst-libs/gst/gl/gstglrenderbuffer.c +++ b/gst-libs/gst/gl/gstglrenderbuffer.c @@ -31,11 +31,12 @@ /** * SECTION:gstglrenderbuffer + * @title: GstGLRenderBuffer * @short_description: memory subclass for GL renderbuffer objects * @see_also: #GstMemory, #GstAllocator * * GstGLRenderbuffer is a #GstGLBaseMemory subclass providing support for - * OpenGL renderbuffers. + * OpenGL renderbuffers. * * #GstGLRenderbuffer is created or wrapped through gst_gl_base_memory_alloc() * with #GstGLRenderbufferAllocationParams. @@ -329,7 +330,7 @@ gst_gl_renderbuffer_init_once (void) /** * gst_is_gl_renderbuffer: * @mem:a #GstMemory - * + * * Returns: whether the memory at @mem is a #GstGLRenderbuffer * * Since: 1.10 diff --git a/gst-libs/gst/gl/gstglshader.c b/gst-libs/gst/gl/gstglshader.c index ba10563c3..c15568dbc 100644 --- a/gst-libs/gst/gl/gstglshader.c +++ b/gst-libs/gst/gl/gstglshader.c @@ -29,6 +29,7 @@ /** * SECTION:gstglshader + * @title: GstGLShader * @short_description: object representing an OpenGL shader program * @see_also: #GstGLSLStage */ diff --git a/gst-libs/gst/gl/gstglsl.c b/gst-libs/gst/gl/gstglsl.c index 2a9fa4f23..78fc45d83 100644 --- a/gst-libs/gst/gl/gstglsl.c +++ b/gst-libs/gst/gl/gstglsl.c @@ -29,6 +29,7 @@ /** * SECTION:gstglsl + * @title: GstGLSL * @short_description: helpers for dealing with OpenGL shaders * @see_also: #GstGLSLStage, #GstGLShader */ diff --git a/gst-libs/gst/gl/gstglsyncmeta.c b/gst-libs/gst/gl/gstglsyncmeta.c index b8822704d..f69e344d0 100644 --- a/gst-libs/gst/gl/gstglsyncmeta.c +++ b/gst-libs/gst/gl/gstglsyncmeta.c @@ -20,6 +20,7 @@ /** * SECTION:gstglsyncmeta + * @title: GstGLSyncMeta * @short_description: synchronization primitives * @see_also: #GstGLBaseMemory, #GstGLContext * diff --git a/gst-libs/gst/gl/gstglupload.c b/gst-libs/gst/gl/gstglupload.c index f19cd6ff5..9b81892b0 100644 --- a/gst-libs/gst/gl/gstglupload.c +++ b/gst-libs/gst/gl/gstglupload.c @@ -43,6 +43,7 @@ /** * SECTION:gstglupload + * @title: GstGLUpload * @short_description: an object that uploads to GL textures * @see_also: #GstGLDownload, #GstGLMemory * diff --git a/gst-libs/gst/gl/gstglutils.c b/gst-libs/gst/gl/gstglutils.c index 2b97111f1..16a866eb3 100644 --- a/gst-libs/gst/gl/gstglutils.c +++ b/gst-libs/gst/gl/gstglutils.c @@ -20,6 +20,7 @@ /** * SECTION:gstglutils + * @title: GstGLUtils * @short_description: some miscellaneous utilities for OpenGL * @see_also: #GstGLContext */ diff --git a/gst-libs/gst/gl/gstglviewconvert.c b/gst-libs/gst/gl/gstglviewconvert.c index 86f877e13..acb9c8504 100644 --- a/gst-libs/gst/gl/gstglviewconvert.c +++ b/gst-libs/gst/gl/gstglviewconvert.c @@ -22,6 +22,7 @@ /** * SECTION:gstglviewconvert + * @title: GstGLViewConvert * @short_description: convert between steroscopic/multiview video formats * @see_also: #GstGLColorConvert, #GstGLContext * @@ -1432,7 +1433,7 @@ gst_gl_view_convert_get_property (GObject * object, guint prop_id, * @inbuf: (transfer none): the #GstGLMemory filled #GstBuffer to convert * * Converts the data contained by @inbuf using the formats specified by the - * #GstCaps passed to gst_gl_view_convert_set_caps() + * #GstCaps passed to gst_gl_view_convert_set_caps() * * Returns: (transfer full): a converted #GstBuffer or %NULL * diff --git a/gst-libs/gst/mpegts/gst-dvb-descriptor.h b/gst-libs/gst/mpegts/gst-dvb-descriptor.h index e7f6e7051..b0fa96507 100644 --- a/gst-libs/gst/mpegts/gst-dvb-descriptor.h +++ 
b/gst-libs/gst/mpegts/gst-dvb-descriptor.h @@ -290,10 +290,10 @@ gboolean gst_mpegts_descriptor_parse_dvb_bouquet_name (const GstMpegtsDescriptor /* GST_MTS_DESC_DVB_SERVICE (0x48) */ /** * GstMpegtsDVBServiceType: - * + * * The type of service of a channel. * - * As specified in Table 87 of ETSI EN 300 468 v1.13.1 + * As specified in Table 87 of ETSI EN 300 468 v1.13.1 */ typedef enum { GST_DVB_SERVICE_RESERVED_00 = 0x00, diff --git a/gst-libs/gst/mpegts/gst-scte-section.h b/gst-libs/gst/mpegts/gst-scte-section.h index 76c501187..babab342d 100644 --- a/gst-libs/gst/mpegts/gst-scte-section.h +++ b/gst-libs/gst/mpegts/gst-scte-section.h @@ -1,7 +1,7 @@ /* - * gst-scte-section.h - + * gst-scte-section.h - * Copyright (C) 2013, CableLabs, Louisville, CO 80027 - * + * * Authors: * RUIH Team <ruih@cablelabs.com> * diff --git a/gst-libs/gst/player/gstplayer-g-main-context-signal-dispatcher.c b/gst-libs/gst/player/gstplayer-g-main-context-signal-dispatcher.c index c1f57d07a..a1cc541cb 100644 --- a/gst-libs/gst/player/gstplayer-g-main-context-signal-dispatcher.c +++ b/gst-libs/gst/player/gstplayer-g-main-context-signal-dispatcher.c @@ -20,6 +20,7 @@ /** * SECTION:gstplayer-gmaincontextsignaldispatcher + * @title: GstPlayerGMainContextSignalDispatcher * @short_description: Player GLib MainContext dispatcher * */ diff --git a/gst-libs/gst/player/gstplayer-media-info.c b/gst-libs/gst/player/gstplayer-media-info.c index 48e4725bc..307d3e56a 100644 --- a/gst-libs/gst/player/gstplayer-media-info.c +++ b/gst-libs/gst/player/gstplayer-media-info.c @@ -20,6 +20,7 @@ /** * SECTION:gstplayer-mediainfo + * @title: GstPlayerMediaInfo * @short_description: Player Media Information * */ diff --git a/gst-libs/gst/player/gstplayer-video-overlay-video-renderer.c b/gst-libs/gst/player/gstplayer-video-overlay-video-renderer.c index 2ba326782..a25b008cd 100644 --- a/gst-libs/gst/player/gstplayer-video-overlay-video-renderer.c +++ b/gst-libs/gst/player/gstplayer-video-overlay-video-renderer.c @@ -20,6 +20,7 @@ /** * SECTION:gstplayer-videooverlayvideorenderer + * @title: GstPlayerVideoOverlayVideoRenderer * @short_description: Player Video Overlay Video Renderer * */ diff --git a/gst-libs/gst/player/gstplayer-visualization.c b/gst-libs/gst/player/gstplayer-visualization.c index 79976a7b2..0f425a6f6 100644 --- a/gst-libs/gst/player/gstplayer-visualization.c +++ b/gst-libs/gst/player/gstplayer-visualization.c @@ -21,6 +21,7 @@ /** * SECTION:gstplayer-visualization + * @title: GstPlayerVisualization * @short_description: Player Visualization * */ diff --git a/gst-libs/gst/player/gstplayer.c b/gst-libs/gst/player/gstplayer.c index e15e32564..273a480b4 100644 --- a/gst-libs/gst/player/gstplayer.c +++ b/gst-libs/gst/player/gstplayer.c @@ -21,6 +21,7 @@ /** * SECTION:gstplayer + * @title: GstPlayer * @short_description: Player * */ diff --git a/gst-libs/gst/video/gstvideoaggregator.c b/gst-libs/gst/video/gstvideoaggregator.c index e17c111d4..1616b2724 100644 --- a/gst-libs/gst/video/gstvideoaggregator.c +++ b/gst-libs/gst/video/gstvideoaggregator.c @@ -20,6 +20,7 @@ /** * SECTION:gstvideoaggregator + * @title: GstVideoAggregator * @short_description: Base class for video aggregators * * VideoAggregator can accept AYUV, ARGB and BGRA video streams. 
For each of the requested diff --git a/gst/accurip/gstaccurip.c b/gst/accurip/gstaccurip.c index f6423b7e6..05578d4e1 100644 --- a/gst/accurip/gstaccurip.c +++ b/gst/accurip/gstaccurip.c @@ -33,6 +33,7 @@ /** * SECTION:element-accurip + * @title: accurip * @short_desc: Computes an AccurateRip CRC * * The accurip element calculates a CRC for an audio stream which can be used @@ -40,12 +41,11 @@ * <ulink url="http://accuraterip.com/">AccurateRip</ulink>. This database * is used to check for a CD rip accuracy. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m uridecodebin uri=file:///path/to/song.flac ! audioconvert ! accurip ! fakesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/aiff/aiffmux.c b/gst/aiff/aiffmux.c index 0f6d33036..96cac95bb 100644 --- a/gst/aiff/aiffmux.c +++ b/gst/aiff/aiffmux.c @@ -42,6 +42,7 @@ /** * SECTION:element-aiffmux + * @title: aiffmux * * Format an audio stream into the Audio Interchange File Format * diff --git a/gst/aiff/aiffparse.c b/gst/aiff/aiffparse.c index faa264770..b1b116703 100644 --- a/gst/aiff/aiffparse.c +++ b/gst/aiff/aiffparse.c @@ -23,30 +23,26 @@ /** * SECTION:element-aiffparse + * @title: aiffparse * - * <refsect2> - * <para> * Parse a .aiff file into raw or compressed audio. - * </para> - * <para> + * * The aiffparse element supports both push and pull mode operations, making it * possible to stream from a network source. - * </para> - * <title>Example launch line</title> - * <para> - * <programlisting> + * + * ## Example launch line + * + * |[ * gst-launch-1.0 filesrc location=sine.aiff ! aiffparse ! audioconvert ! alsasink - * </programlisting> + * ]| * Read a aiff file and output to the soundcard using the ALSA element. The * aiff file is assumed to contain raw uncompressed samples. - * </para> - * <para> - * <programlisting> + * + * |[ * gst-launch-1.0 souphttpsrc location=http://www.example.org/sine.aiff ! queue ! aiffparse ! audioconvert ! alsasink - * </programlisting> + * ]| * Stream data from a network url. - * </para> - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/asfmux/gstasfmux.c b/gst/asfmux/gstasfmux.c index 3579a554d..9896019ff 100644 --- a/gst/asfmux/gstasfmux.c +++ b/gst/asfmux/gstasfmux.c @@ -24,6 +24,7 @@ /** * SECTION:element-asfmux + * @title: asfmux * * Muxes media into an ASF file/stream. * @@ -31,9 +32,9 @@ * stream number of the stream that goes through that pad. Stream numbers * are assigned sequentially, starting from 1. * - * <refsect2> - * <title>Example launch lines</title> - * <para>(write everything in one line, without the backslash characters)</para> + * ## Example launch lines + * + * (write everything in one line, without the backslash characters) * |[ * gst-launch-1.0 videotestsrc num-buffers=250 \ * ! "video/x-raw,format=(string)I420,framerate=(fraction)25/1" ! avenc_wmv2 \ @@ -43,15 +44,15 @@ * ]| This creates an ASF file containing an WMV video stream * with a test picture and WMA audio stream of a test sound. * - * <title>Live streaming</title> + * ## Live streaming * asfmux and rtpasfpay are capable of generating a live asf stream. - * asfmux has to set its 'streamable' property to true, because in this + * asfmux has to set its 'streamable' property to true, because in this * mode it won't try to seek back to the start of the file to replace * some fields that couldn't be known at the file start. 
In this mode, * it won't also send indexes at the end of the data packets (the actual * media content) * the following pipelines are an example of this usage. - * <para>(write everything in one line, without the backslash characters)</para> + * (write everything in one line, without the backslash characters) * Server (sender) * |[ * gst-launch-1.0 -ve videotestsrc ! avenc_wmv2 ! asfmux name=mux streamable=true \ @@ -65,7 +66,7 @@ * ! videoconvert ! autovideosink \ * d. ! queue ! audioconvert ! autoaudiosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H @@ -421,7 +422,7 @@ content_description_calc_size_for_tag (const GstTagList * taglist, text = g_value_get_string (&value); /* +1 -> because of the \0 at the end - * 2* -> because we have uft8, and asf demands utf16 + * 2* -> because we have uft8, and asf demands utf16 */ content_size = 2 * (1 + g_utf8_strlen (text, -1)); @@ -442,7 +443,7 @@ content_description_calc_size_for_tag (const GstTagList * taglist, /* size of the tag content in utf16 + * size of the tag name + * 3 uint16 (size of the tag name string, - * size of the tag content string and + * size of the tag content string and * type of content */ asftags->ext_cont_desc_size += content_size + @@ -465,7 +466,7 @@ content_description_calc_size_for_tag (const GstTagList * taglist, * size needed for the default and extended content description objects. * This results and a copy of the #GstTagList * are stored in the #GstAsfTags. We store a copy so that - * the sizes estimated here mantain the same until they are + * the sizes estimated here mantain the same until they are * written to the asf file. */ static void @@ -531,7 +532,7 @@ add_metadata_tag_size (const GstTagList * taglist, const gchar * tag, text = g_value_get_string (&value); /* +1 -> because of the \0 at the end - * 2* -> because we have uft8, and asf demands utf16 + * 2* -> because we have uft8, and asf demands utf16 */ content_size = 2 * (1 + g_utf8_strlen (text, -1)); } @@ -849,7 +850,7 @@ gst_asf_mux_write_extended_stream_properties (GstAsfMux * asfmux, guint8 ** buf, * @size_buf: pointer to the memory position to write the size of the string * @str_buf: pointer to the memory position to write the string * @str: the string to be writen (in UTF-8) - * @use32: if the string size should be writen with 32 bits (if true) + * @use32: if the string size should be writen with 32 bits (if true) * or with 16 (if false) * * Writes a string with its size as it is needed in many asf objects. @@ -870,7 +871,7 @@ gst_asf_mux_write_string_with_size (GstAsfMux * asfmux, GST_LOG_OBJECT (asfmux, "Writing extended content description string: " "%s", str); - /* + /* * Covert the string to utf16 * Also force the last bytes to null terminated, * tags were with extra weird characters without it. @@ -909,7 +910,7 @@ gst_asf_mux_write_string_with_size (GstAsfMux * asfmux, * * Checks if a string tag with tagname exists in the taglist. If it * exists it is writen as an UTF-16LE to data_buf and its size in bytes - * is writen to size_buf. It is used for writing content description + * is writen to size_buf. It is used for writing content description * object fields. * * Returns: the size of the string @@ -1191,7 +1192,7 @@ gst_asf_mux_write_data_object (GstAsfMux * asfmux, guint8 ** buf) /* Data object size. This is always >= ASF_DATA_OBJECT_SIZE. The standard * specifically accepts the value 0 in live streams, but WMP is not accepting * this while streaming using WMSP, so we default to minimum size also for - * live streams. 
Otherwise this field must be updated later on when we know + * live streams. Otherwise this field must be updated later on when we know * the complete stream size. */ GST_WRITE_UINT64_LE (*buf + 16, ASF_DATA_OBJECT_SIZE); @@ -1338,7 +1339,7 @@ gst_asf_mux_start_file (GstAsfMux * asfmux) gst_asf_mux_write_header_object (asfmux, &bufdata, map.size - ASF_DATA_OBJECT_SIZE, 2 + stream_num); - /* get the position of the file properties object for + /* get the position of the file properties object for * updating it in gst_asf_mux_stop_file */ asfmux->file_properties_object_position = bufdata - map.data; gst_asf_mux_write_file_properties (asfmux, &bufdata); @@ -1765,7 +1766,7 @@ cleanup_and_return: /** * gst_asf_mux_stop_file: * @asfmux: #GstAsfMux - * + * * Finalizes the asf stream by pushing the indexes after * the data object. Also seeks back to the header positions * to rewrite some fields such as the total number of bytes @@ -1802,7 +1803,7 @@ gst_asf_mux_stop_file (GstAsfMux * asfmux) play_duration = pad->play_duration; } - /* going back to file properties object to fill in + /* going back to file properties object to fill in * values we didn't know back then */ GST_DEBUG_OBJECT (asfmux, "Sending new segment to file properties object position"); @@ -2134,7 +2135,7 @@ gst_asf_mux_audio_set_caps (GstPad * pad, GstCaps * caps) audiopad->audioinfo.rate = (guint32) rate; /* taken from avimux - * codec initialization data, if any + * codec initialization data, if any */ codec_data = gst_structure_get_value (structure, "codec_data"); if (codec_data) { @@ -2227,7 +2228,7 @@ gst_asf_mux_video_set_caps (GstPad * pad, GstCaps * caps) videopad->vidinfo.height = (gint32) height; /* taken from avimux - * codec initialization data, if any + * codec initialization data, if any */ codec_data = gst_structure_get_value (structure, "codec_data"); if (codec_data) { diff --git a/gst/asfmux/gstasfobjects.c b/gst/asfmux/gstasfobjects.c index c2b112926..f8455d970 100644 --- a/gst/asfmux/gstasfobjects.c +++ b/gst/asfmux/gstasfobjects.c @@ -58,7 +58,7 @@ const Guid guids[] = { /** * gst_asf_generate_file_id: - * + * * Generates a random GUID * * Returns: The generated GUID @@ -146,7 +146,7 @@ gst_asf_read_var_size_field (guint8 * data, guint8 field_type) /** * gst_asf_get_var_size_field_len: * @field_type: the asf field type - * + * * Returns: the size in bytes of a variable of field_type type */ guint @@ -168,7 +168,7 @@ gst_asf_get_var_size_field_len (guint8 field_type) * gst_asf_file_info_new: * * Creates a new #GstAsfFileInfo - * + * * Returns: the created struct */ GstAsfFileInfo * @@ -180,7 +180,7 @@ gst_asf_file_info_new (void) /** * gst_asf_file_info_reset: * @info: the #GstAsfFileInfo to be reset - * + * * resets the data of a #GstFileInfo */ void diff --git a/gst/audiofxbad/gstaudiochannelmix.c b/gst/audiofxbad/gstaudiochannelmix.c index 7d92a5b26..84e1049f7 100644 --- a/gst/audiofxbad/gstaudiochannelmix.c +++ b/gst/audiofxbad/gstaudiochannelmix.c @@ -18,18 +18,18 @@ */ /** * SECTION:element-gstaudiochannelmix + * @title: gstaudiochannelmix * * The audiochannelmix element mixes channels in stereo audio based on * properties set on the element. The primary purpose is reconstruct * equal left/right channels on an input stream that has audio in only * one channel. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v audiotestsrc ! audiochannelmix ! 
autoaudiosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/audiomixer/gstaudiointerleave.c b/gst/audiomixer/gstaudiointerleave.c index f4e9fa107..8d99b3ce1 100644 --- a/gst/audiomixer/gstaudiointerleave.c +++ b/gst/audiomixer/gstaudiointerleave.c @@ -27,7 +27,7 @@ */ /** * SECTION:element-audiointerleave - * + * @title: audiointerleave * */ @@ -593,7 +593,7 @@ gst_audio_interleave_class_init (GstAudioInterleaveClass * klass) /** * GstInterleave:channel-positions - * + * * Channel positions: This property controls the channel positions * that are used on the src caps. The number of elements should be * the same as the number of sink pads and the array should contain @@ -617,7 +617,7 @@ gst_audio_interleave_class_init (GstAudioInterleaveClass * klass) /** * GstInterleave:channel-positions-from-input - * + * * Channel positions from input: If this property is set to %TRUE the channel * positions will be taken from the input caps if valid channel positions for * the output can be constructed from them. If this is set to %TRUE setting the diff --git a/gst/audiomixer/gstaudiomixer.c b/gst/audiomixer/gstaudiomixer.c index de539db82..ac2f49c04 100644 --- a/gst/audiomixer/gstaudiomixer.c +++ b/gst/audiomixer/gstaudiomixer.c @@ -23,6 +23,7 @@ */ /** * SECTION:element-audiomixer + * @title: audiomixer * * The audiomixer allows to mix several streams into one by adding the data. * Mixed data is clamped to the min/max values of the data format. @@ -32,21 +33,13 @@ * The input pads are from a GstPad subclass and have additional * properties to mute each pad individually and set the volume: * - * <itemizedlist> - * <listitem> - * "mute": Whether to mute the pad or not (#gboolean) - * </listitem> - * <listitem> - * "volume": The volume of the pad, between 0.0 and 10.0 (#gdouble) - * </listitem> - * </itemizedlist> + * * "mute": Whether to mute the pad or not (#gboolean) + * * "volume": The volume of the pad, between 0.0 and 10.0 (#gdouble) * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc freq=100 ! audiomixer name=mix ! audioconvert ! alsasink audiotestsrc freq=500 ! mix. * ]| This pipeline produces two sine waves mixed together. - * </refsect2> * */ diff --git a/gst/audiomixmatrix/gstaudiomixmatrix.c b/gst/audiomixmatrix/gstaudiomixmatrix.c index 6dd59ffd9..75f4a970a 100644 --- a/gst/audiomixmatrix/gstaudiomixmatrix.c +++ b/gst/audiomixmatrix/gstaudiomixmatrix.c @@ -22,6 +22,7 @@ /** * SECTION:element-audiomixmatrix + * @title: audiomixmatrix * @short_description: Transform input/output channels according to a matrix * * This element transforms a given number of input channels into a given @@ -32,8 +33,7 @@ * are automatically negotiated and the transformation matrix is a truncated * identity matrix. * - * <refsect2> - * <title>Example matrix generation code</title> + * ## Example matrix generation code * To generate the matrix using code: * * |[ @@ -54,14 +54,12 @@ * g_object_set_property (G_OBJECT (audiomixmatrix), "matrix", &v); * g_value_unset (&v); * ]| - * </refsect2> * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc ! audio/x-raw,channels=4 ! audiomixmatrix in-channels=4 out-channels=2 channel-mask=-1 matrix="<<(double)1, (double)0, (double)0, (double)0>, <0.0, 1.0, 0.0, 0.0>>" ! audio/x-raw,channels=2 ! 
autoaudiosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/audiovisualizers/gstspacescope.c b/gst/audiovisualizers/gstspacescope.c index 5071dc8b1..3e59609f3 100644 --- a/gst/audiovisualizers/gstspacescope.c +++ b/gst/audiovisualizers/gstspacescope.c @@ -20,17 +20,17 @@ */ /** * SECTION:element-spacescope + * @title: spacescope * @see_also: goom * * Spacescope is a simple audio visualisation element. It maps the left and * right channel to x and y coordinates. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc ! audioconvert ! spacescope ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/gst/audiovisualizers/gstspectrascope.c b/gst/audiovisualizers/gstspectrascope.c index 10aa7d2f8..5ba73ba11 100644 --- a/gst/audiovisualizers/gstspectrascope.c +++ b/gst/audiovisualizers/gstspectrascope.c @@ -21,17 +21,17 @@ */ /** * SECTION:element-spectrascope + * @title: spectrascope * @see_also: goom * * Spectrascope is a simple spectrum visualisation element. It renders the * frequency spectrum as a series of bars. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc ! audioconvert ! spectrascope ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/gst/audiovisualizers/gstsynaescope.c b/gst/audiovisualizers/gstsynaescope.c index f99a753ea..4b5c0ed5a 100644 --- a/gst/audiovisualizers/gstsynaescope.c +++ b/gst/audiovisualizers/gstsynaescope.c @@ -20,17 +20,17 @@ */ /** * SECTION:element-synaescope + * @title: synaescope * @see_also: goom * * Synaescope is an audio visualisation element. It analyzes frequencies and * out-of phase properties of audio and draws this as clouds of stars. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc ! audioconvert ! synaescope ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/gst/audiovisualizers/gstwavescope.c b/gst/audiovisualizers/gstwavescope.c index e4eb8c2fb..c934daa4d 100644 --- a/gst/audiovisualizers/gstwavescope.c +++ b/gst/audiovisualizers/gstwavescope.c @@ -20,17 +20,17 @@ */ /** * SECTION:element-wavescope + * @title: wavescope * @see_also: goom * * Wavescope is a simple audio visualisation element. It renders the waveforms * like on an oscilloscope. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc ! audioconvert ! wavescope ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/gst/autoconvert/gstautoconvert.c b/gst/autoconvert/gstautoconvert.c index 4761f4403..bed5e88e7 100644 --- a/gst/autoconvert/gstautoconvert.c +++ b/gst/autoconvert/gstautoconvert.c @@ -21,6 +21,7 @@ */ /** * SECTION:element-autoconvert + * @title: autoconvert * * The #autoconvert element has one sink and one source pad. It will look for * other elements that also have one sink and one source pad. diff --git a/gst/bayer/gstbayer2rgb.c b/gst/bayer/gstbayer2rgb.c index de356edff..a6d24eab1 100644 --- a/gst/bayer/gstbayer2rgb.c +++ b/gst/bayer/gstbayer2rgb.c @@ -23,6 +23,7 @@ /** * SECTION:element-bayer2rgb + * @title: bayer2rgb * * Decodes raw camera bayer (fourcc BA81) to RGB. 
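Editorial aside, not part of this patch: the audiomixer docstring earlier in this hunk describes per-pad "mute" and "volume" properties but only shows a gst-launch pipeline. A minimal C sketch of driving those properties from application code might look as follows; the "sink_%u" request-pad template name and the use of gst_element_get_request_pad() are assumptions based on the usual GStreamer request-pad API, not taken from the commit.

|[
  /* Editor's sketch, not from the commit: adjust one audiomixer input.
   * Assumes the "sink_%u" request-pad template and the per-pad
   * "mute"/"volume" properties described in the docstring above. */
  GstElement *mix = gst_element_factory_make ("audiomixer", "mix");
  GstPad *pad = gst_element_get_request_pad (mix, "sink_%u");

  g_object_set (pad, "volume", 0.5, "mute", FALSE, NULL);

  /* ... link an upstream source to "pad", then release it when done ... */
  gst_object_unref (pad);
]|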
*/ diff --git a/gst/camerabin2/camerabingeneral.c b/gst/camerabin2/camerabingeneral.c index 45a0df46a..799c86d3a 100644 --- a/gst/camerabin2/camerabingeneral.c +++ b/gst/camerabin2/camerabingeneral.c @@ -20,6 +20,7 @@ /** * SECTION:camerabingeneral + * @title: GstCameraBin2 * @short_description: helper functions for #GstCameraBin2 and it's modules * * Common helper functions for #GstCameraBin2, #GstCameraBin2Image and diff --git a/gst/camerabin2/gstcamerabin2.c b/gst/camerabin2/gstcamerabin2.c index 24bb4fbda..0e4a3c727 100644 --- a/gst/camerabin2/gstcamerabin2.c +++ b/gst/camerabin2/gstcamerabin2.c @@ -18,33 +18,22 @@ */ /** * SECTION:element-camerabin + * @title: camerabin * * CameraBin is a high-level camera object that encapsulates gstreamer * elements, providing an API for controlling a digital camera. * - * <note> - * Note that camerabin is still UNSTABLE and under development. - * </note> + * > Note that camerabin is still UNSTABLE and under development. * * CameraBin has the following main features: - * <itemizedlist> - * <listitem> - * Record videos - * </listitem> - * <listitem> - * Capture pictures - * </listitem> - * <listitem> - * Display a viewfinder - * </listitem> - * <listitem> - * Post preview images for each capture (video and image) - * </listitem> - * </itemizedlist> * - * <refsect2> - * <title>Usage</title> - * <para> + * * Record videos + * * Capture pictures + * * Display a viewfinder + * * Post preview images for each capture (video and image) + * + * ## Usage + * * Camerabin can be created using gst_element_factory_make() just like * any other element. Video or image capture mode can be selected using * the #GstCameraBin:mode property and the file to save the capture is @@ -69,12 +58,11 @@ * In both modes, if #GstCameraBin:post-previews is %TRUE, a #GstBuffer * will be post to the #GstBus in a field named 'buffer', in a * 'preview-image' message of type %GST_MESSAGE_ELEMENT. - * </para> - * </refsect2> + * - * <refsect2> - * <title>Customization</title> - * <para> + * + * ## Customization + * * Camerabin provides various customization properties, allowing the user * to set custom filters, selecting the viewfinder sink and formats to * use to encode the captured images/videos. @@ -114,20 +102,16 @@ * of its branches: video capture, image capture, viewfinder and preview. * Check #GstCameraBin:video-filter, #GstCameraBin:image-filter, * #GstCameraBin:viewfinder-filter and #GstCameraBin:preview-filter. - * </para> - * </refsect2> * - * <refsect2> - * <title>Example launch line</title> - * <para> + * ## Example launch line + * * Unfortunately, camerabin can't be really used from gst-launch-1.0, as you * need to send signals to control it. The following pipeline might be able * to show the viewfinder using all the default elements. * |[ * gst-launch-1.0 -v -m camerabin * ]| - * </para> - * </refsect2> + * */ diff --git a/gst/camerabin2/gstdigitalzoom.c b/gst/camerabin2/gstdigitalzoom.c index b936fe455..c7de9a650 100644 --- a/gst/camerabin2/gstdigitalzoom.c +++ b/gst/camerabin2/gstdigitalzoom.c @@ -21,6 +21,7 @@ /** * SECTION:element-digitalzoom + * @title: digitalzoom * * Does digital zooming by cropping and scaling an image. 
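Since the camerabin text above notes that the element cannot really be driven from gst-launch-1.0 and must be controlled through properties and action signals, a hedged application-side sketch (an editorial addition, not taken from this commit) may help illustrate the described mode/location properties and the start-capture signal; the numeric mode value 1 for image capture is an assumption.

|[
  /* Editor's sketch, not part of the patch: take one still image with
   * camerabin.  Assumes mode 1 selects image capture and that the
   * pipeline has reached PLAYING before the capture is requested. */
  GstElement *camera = gst_element_factory_make ("camerabin", NULL);

  g_object_set (camera, "mode", 1, "location", "img_%d.jpg", NULL);
  gst_element_set_state (camera, GST_STATE_PLAYING);
  /* ... wait until the pipeline is actually running ... */
  g_signal_emit_by_name (camera, "start-capture");
]|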
* diff --git a/gst/camerabin2/gstviewfinderbin.c b/gst/camerabin2/gstviewfinderbin.c index 7ff7bd509..1d3da329a 100644 --- a/gst/camerabin2/gstviewfinderbin.c +++ b/gst/camerabin2/gstviewfinderbin.c @@ -18,16 +18,16 @@ */ /** * SECTION:element-gstviewfinderbin + * @title: gstviewfinderbin * * The gstviewfinderbin element is a displaying element for camerabin2. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! viewfinderbin * ]| * Feeds the viewfinderbin with video test data. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/camerabin2/gstwrappercamerabinsrc.c b/gst/camerabin2/gstwrappercamerabinsrc.c index 4c2d8ec12..c1c6dc5ee 100644 --- a/gst/camerabin2/gstwrappercamerabinsrc.c +++ b/gst/camerabin2/gstwrappercamerabinsrc.c @@ -22,6 +22,7 @@ /** * SECTION:element-wrappercamerabinsrc + * @title: wrappercamerabinsrc * * A camera bin src element that wraps a default video source with a single * pad into the 3pad model that camerabin2 expects. diff --git a/gst/coloreffects/gstchromahold.c b/gst/coloreffects/gstchromahold.c index 011c401ee..1ddb4c072 100644 --- a/gst/coloreffects/gstchromahold.c +++ b/gst/coloreffects/gstchromahold.c @@ -23,7 +23,8 @@ /** * SECTION:element-chromahold - * + * @title: chromahold + * * The chromahold element will remove all color information for * all colors except a single one and converts them to grayscale. * diff --git a/gst/coloreffects/gstcoloreffects.c b/gst/coloreffects/gstcoloreffects.c index 5a1aea110..dc26f70dd 100644 --- a/gst/coloreffects/gstcoloreffects.c +++ b/gst/coloreffects/gstcoloreffects.c @@ -19,16 +19,16 @@ /** * SECTION:element-coloreffects + * @title: coloreffects * * Map colors of the video input to a lookup table * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! coloreffects preset=heat ! videoconvert ! * autovideosink * ]| This pipeline shows the effect of coloreffects on a test stream. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/compositor/compositor.c b/gst/compositor/compositor.c index 068d2bb72..fab75c639 100644 --- a/gst/compositor/compositor.c +++ b/gst/compositor/compositor.c @@ -22,6 +22,7 @@ /** * SECTION:element-compositor + * @title: compositor * * Compositor can accept AYUV, ARGB and BGRA video streams. For each of the requested * sink pads it will compare the incoming geometry and framerate to define the @@ -29,40 +30,19 @@ * biggest incoming video stream and the framerate of the fastest incoming one. * * Compositor will do colorspace conversion. - * + * * Individual parameters for each input stream can be configured on the * #GstCompositorPad: * - * <itemizedlist> - * <listitem> - * "xpos": The x-coordinate position of the top-left corner of the picture - * (#gint) - * </listitem> - * <listitem> - * "ypos": The y-coordinate position of the top-left corner of the picture - * (#gint) - * </listitem> - * <listitem> - * "width": The width of the picture; the input will be scaled if necessary - * (#gint) - * </listitem> - * <listitem> - * "height": The height of the picture; the input will be scaled if necessary - * (#gint) - * </listitem> - * <listitem> - * "alpha": The transparency of the picture; between 0.0 and 1.0. The blending - * is a simple copy when fully-transparent (0.0) and fully-opaque (1.0). 
- * (#gdouble) - * </listitem> - * <listitem> - * "zorder": The z-order position of the picture in the composition - * (#guint) - * </listitem> - * </itemizedlist> + * * "xpos": The x-coordinate position of the top-left corner of the picture (#gint) + * * "ypos": The y-coordinate position of the top-left corner of the picture (#gint) + * * "width": The width of the picture; the input will be scaled if necessary (#gint) + * * "height": The height of the picture; the input will be scaled if necessary (#gint) + * * "alpha": The transparency of the picture; between 0.0 and 1.0. The blending + * is a simple copy when fully-transparent (0.0) and fully-opaque (1.0). (#gdouble) + * * "zorder": The z-order position of the picture in the composition (#guint) * - * <refsect2> - * <title>Sample pipelines</title> + * ## Sample pipelines * |[ * gst-launch-1.0 \ * videotestsrc pattern=1 ! \ @@ -85,7 +65,7 @@ * compositor name=comp ! videoconvert ! ximagesink \ * videotestsrc ! \ * video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! comp. - * ]| A pipeline to demostrate bgra comping. (This does not demonstrate alpha blending). + * ]| A pipeline to demostrate bgra comping. (This does not demonstrate alpha blending). * |[ * gst-launch-1.0 videotestsrc pattern=1 ! \ * video/x-raw,format =I420, framerate=\(fraction\)10/1, width=100, height=100 ! \ @@ -103,7 +83,7 @@ * "video/x-raw,format=AYUV,width=800,height=600,framerate=(fraction)10/1" ! \ * timeoverlay ! queue2 ! comp. * ]| A pipeline to demonstrate synchronized compositing (the second stream starts after 3 seconds) - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/debugutils/fpsdisplaysink.c b/gst/debugutils/fpsdisplaysink.c index 00f89a3e7..9774b6a52 100644 --- a/gst/debugutils/fpsdisplaysink.c +++ b/gst/debugutils/fpsdisplaysink.c @@ -19,18 +19,18 @@ /** * SECTION:element-fpsdisplaysink + * @title: fpsdisplaysink * * Can display the current and average framerate as a testoverlay or on stdout. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 videotestsrc ! fpsdisplaysink * gst-launch-1.0 videotestsrc ! fpsdisplaysink text-overlay=false * gst-launch-1.0 filesrc location=video.avi ! decodebin name=d ! queue ! fpsdisplaysink d. ! queue ! fakesink sync=true * gst-launch-1.0 playbin uri=file:///path/to/video.avi video-sink="fpsdisplaysink" audio-sink=fakesink * ]| - * </refsect2> + * */ /* FIXME: * - can we avoid plugging the textoverlay? diff --git a/gst/debugutils/gstchopmydata.c b/gst/debugutils/gstchopmydata.c index c382d3274..bed08befb 100644 --- a/gst/debugutils/gstchopmydata.c +++ b/gst/debugutils/gstchopmydata.c @@ -18,26 +18,25 @@ */ /** * SECTION:element-gstchopmydata + * @title: gstchopmydata * * The chopmydata element takes an incoming stream and chops it up * into randomly sized buffers. Size of outgoing buffers are determined * by the max-size, min-size, and step-size properties. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v audiotestsrc num-buffers=10 ! chopmydata min-size=100 * max-size=200 step-size=2 ! fakesink -v * ]| - * + * * This pipeline will create 10 buffers that are by default 2048 bytes * each (1024 samples each), and chop them up into buffers that range * in size from 100 bytes to 200 bytes, with the restriction that sizes * are a multiple of 2. This restriction is important, because the * default sample size for audiotestsrc is 2 bytes (one channel, 16-bit * audio). 
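The compositor per-pad properties listed above (xpos, ypos, width, height, alpha, zorder) are only exercised through gst-launch pipelines in this patch. Purely as an editorial illustration, setting them from C on a requested pad could look like the sketch below, again assuming the conventional "sink_%u" request-pad template.

|[
  /* Editor's sketch (not from the commit): place one compositor input in
   * the lower-right quadrant at half opacity, above lower z-orders. */
  GstElement *comp = gst_element_factory_make ("compositor", "comp");
  GstPad *pad = gst_element_get_request_pad (comp, "sink_%u");

  g_object_set (pad,
      "xpos", 320, "ypos", 240,
      "width", 320, "height", 240,
      "alpha", 0.5, "zorder", 2, NULL);
]|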
- * - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/debugutils/gstdebugspy.c b/gst/debugutils/gstdebugspy.c index fed2db180..ebf050f6d 100644 --- a/gst/debugutils/gstdebugspy.c +++ b/gst/debugutils/gstdebugspy.c @@ -22,16 +22,16 @@ /** * SECTION:element-debugspy + * @title: debugspy * * A spy element that can provide information on buffers going through it, with * bus messages. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m videotestsrc ! debugspy ! fakesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/debugutils/gsterrorignore.c b/gst/debugutils/gsterrorignore.c index ed42a0505..30e5beee2 100644 --- a/gst/debugutils/gsterrorignore.c +++ b/gst/debugutils/gsterrorignore.c @@ -20,17 +20,18 @@ /** * SECTION:element-errorignore + * @title: errorignore * * Passes through all packets, until it encounters GST_FLOW_ERROR or * GST_FLOW_NOT_NEGOTIATED (configurable). At that point it will unref the - * buffers and return GST_FLOW_OK (configurable) - until the next + * buffers and return GST_FLOW_OK (configurable) - until the next * READY_TO_PAUSED, RECONFIGURE or FLUSH_STOP. - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! errorignore ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/debugutils/gstwatchdog.c b/gst/debugutils/gstwatchdog.c index 0a4c12e7b..1824cbf3b 100644 --- a/gst/debugutils/gstwatchdog.c +++ b/gst/debugutils/gstwatchdog.c @@ -19,6 +19,7 @@ */ /** * SECTION:element-gstwatchdog + * @title: watchdog * * The watchdog element watches buffers and events flowing through * a pipeline. If no buffers are seen for a configurable amount of @@ -32,12 +33,11 @@ * This element is currently intended for transcoding pipelines, * although may be useful in other contexts. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v fakesrc ! watchdog ! fakesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/dvbsuboverlay/dvb-sub.c b/gst/dvbsuboverlay/dvb-sub.c index 75965631a..cddb19ec9 100644 --- a/gst/dvbsuboverlay/dvb-sub.c +++ b/gst/dvbsuboverlay/dvb-sub.c @@ -45,6 +45,7 @@ static void dvb_sub_init (void); /** * SECTION:dvb-sub + * @title: GstDvbSub * @short_description: a DVB subtitle parsing class * @stability: Unstable * diff --git a/gst/dvbsuboverlay/dvb-sub.h b/gst/dvbsuboverlay/dvb-sub.h index 7faa31f45..5917e7e49 100644 --- a/gst/dvbsuboverlay/dvb-sub.h +++ b/gst/dvbsuboverlay/dvb-sub.h @@ -68,7 +68,7 @@ typedef struct DVBSubtitleRect { /** * DVBSubtitleWindow - * @version: version + * @version: version * @display_window_flag: window_* are valid * @display_width: assumed width of display * @display_height: assumed height of display diff --git a/gst/dvbsuboverlay/gstdvbsuboverlay.c b/gst/dvbsuboverlay/gstdvbsuboverlay.c index 1723cae92..f28590cd9 100644 --- a/gst/dvbsuboverlay/gstdvbsuboverlay.c +++ b/gst/dvbsuboverlay/gstdvbsuboverlay.c @@ -20,17 +20,17 @@ /** * SECTION:element-dvbsuboverlay + * @title: dvbsuboverlay * * Renders DVB subtitles on top of a video stream. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ FIXME * gst-launch-1.0 -v filesrc location=/path/to/ts ! mpegtsdemux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink \ * d. ! queue ! mpegvideoparse ! mpeg2dec ! videoconvert ! r. \ * d. ! queue ! "subpicture/x-dvb" ! dvbsuboverlay name=r ! 
videoconvert ! autovideosink * ]| This pipeline demuxes a MPEG-TS file with MPEG2 video, MP3 audio and embedded DVB subtitles and renders the subtitles on top of the video. - * </refsect2> + * */ diff --git a/gst/dvdspu/gstdvdspu.c b/gst/dvdspu/gstdvdspu.c index fb2174890..41c964b0d 100644 --- a/gst/dvdspu/gstdvdspu.c +++ b/gst/dvdspu/gstdvdspu.c @@ -18,15 +18,15 @@ */ /** * SECTION:element-dvdspu + * @title: dvdspu * * DVD sub picture overlay element. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * FIXME: gst-launch-1.0 ... * ]| FIXME: description for the sample launch pipeline - * </refsect2> + * */ #ifdef HAVE_CONFIG_H # include <config.h> diff --git a/gst/festival/gstfestival.c b/gst/festival/gstfestival.c index 981ec6112..73d492ab8 100644 --- a/gst/festival/gstfestival.c +++ b/gst/festival/gstfestival.c @@ -62,18 +62,18 @@ /** * SECTION:element-festival - * + * @title: festival + * * This element connects to a * <ulink url="http://www.festvox.org/festival/index.html">festival</ulink> * server process and uses it to synthesize speech. Festival need to run already * in server mode, started as <screen>festival --server</screen> - * - * <refsect2> - * <title>Example pipeline</title> + * + * ## Example pipeline * |[ * echo 'Hello G-Streamer!' | gst-launch-1.0 fdsrc fd=0 ! festival ! wavparse ! audioconvert ! alsasink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/fieldanalysis/gstfieldanalysis.c b/gst/fieldanalysis/gstfieldanalysis.c index 2b01b5180..d1ef181a6 100644 --- a/gst/fieldanalysis/gstfieldanalysis.c +++ b/gst/fieldanalysis/gstfieldanalysis.c @@ -43,17 +43,17 @@ /** * SECTION:element-fieldanalysis + * @title: fieldanalysis * * Analyse fields from video buffers to identify whether the buffers are * progressive/telecined/interlaced and, if telecined, the telecine pattern * used. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v uridecodebin uri=/path/to/foo.bar ! fieldanalysis ! deinterlace ! videoconvert ! autovideosink * ]| This pipeline will analyse a video stream with default metrics and thresholds and output progressive frames. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/freeverb/gstfreeverb.c b/gst/freeverb/gstfreeverb.c index 536452e81..f7292e038 100644 --- a/gst/freeverb/gstfreeverb.c +++ b/gst/freeverb/gstfreeverb.c @@ -30,16 +30,16 @@ /** * SECTION:element-freeverb + * @title: freeverb * * Reverberation/room effect. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 audiotestsrc wave=saw ! freeverb ! autoaudiosink * gst-launch-1.0 filesrc location="melo1.ogg" ! decodebin ! audioconvert ! freeverb ! autoaudiosink * ]| - * </refsect2> + * */ /* FIXME: diff --git a/gst/gaudieffects/gstburn.c b/gst/gaudieffects/gstburn.c index ee4487584..bf662270a 100644 --- a/gst/gaudieffects/gstburn.c +++ b/gst/gaudieffects/gstburn.c @@ -46,15 +46,15 @@ /** * SECTION:element-burn + * @title: burn * * Burn adjusts the colors of a video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! burn ! videoconvert ! 
autovideosink * ]| This pipeline shows the effect of burn on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gaudieffects/gstchromium.c b/gst/gaudieffects/gstchromium.c index 65ac5b50c..fb55aac25 100644 --- a/gst/gaudieffects/gstchromium.c +++ b/gst/gaudieffects/gstchromium.c @@ -46,15 +46,15 @@ /** * SECTION:element-chromium + * @title: chromium * * Chromium breaks the colors of a video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! chromium ! videoconvert ! autovideosink * ]| This pipeline shows the effect of chromium on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gaudieffects/gstdilate.c b/gst/gaudieffects/gstdilate.c index 301135d3b..a21ab3d5a 100644 --- a/gst/gaudieffects/gstdilate.c +++ b/gst/gaudieffects/gstdilate.c @@ -46,15 +46,15 @@ /** * SECTION:element-dilate + * @title: dilate * * Dilate adjusts the colors of a video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! dilate ! videoconvert ! autovideosink * ]| This pipeline shows the effect of dilate on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gaudieffects/gstdodge.c b/gst/gaudieffects/gstdodge.c index eb5019e71..86b5d9d02 100644 --- a/gst/gaudieffects/gstdodge.c +++ b/gst/gaudieffects/gstdodge.c @@ -46,15 +46,15 @@ /** * SECTION:element-dodge + * @title: dodge * * Dodge saturates the colors of a video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! dodge ! videoconvert ! autovideosink * ]| This pipeline shows the effect of dodge on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gaudieffects/gstexclusion.c b/gst/gaudieffects/gstexclusion.c index 6ebe82526..62445bd06 100644 --- a/gst/gaudieffects/gstexclusion.c +++ b/gst/gaudieffects/gstexclusion.c @@ -46,15 +46,15 @@ /** * SECTION:element-exclusion + * @title: exclusion * * Exclusion saturates the colors of a video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! exclusion ! videoconvert ! autovideosink * ]| This pipeline shows the effect of exclusion on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gaudieffects/gstgaussblur.c b/gst/gaudieffects/gstgaussblur.c index 9c1e0979b..ff5ca520e 100644 --- a/gst/gaudieffects/gstgaussblur.c +++ b/gst/gaudieffects/gstgaussblur.c @@ -47,15 +47,15 @@ /** * SECTION:element-gaussianblur + * @title: gaussianblur * * Gaussianblur blurs the video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! gaussianblur ! videoconvert ! autovideosink * ]| This pipeline shows the effect of gaussianblur on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gaudieffects/gstsolarize.c b/gst/gaudieffects/gstsolarize.c index 0710d0fd6..8fc9ff69e 100644 --- a/gst/gaudieffects/gstsolarize.c +++ b/gst/gaudieffects/gstsolarize.c @@ -46,15 +46,15 @@ /** * SECTION:element-solarize + * @title: solarize * * Solarize does a smart inverse in a video stream in realtime. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! solarize ! videoconvert ! 
autovideosink * ]| This pipeline shows the effect of solarize on a test stream - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gdp/dataprotocol.c b/gst/gdp/dataprotocol.c index d6534ba94..47b0f7b19 100644 --- a/gst/gdp/dataprotocol.c +++ b/gst/gdp/dataprotocol.c @@ -23,6 +23,7 @@ /** * SECTION:gstdataprotocol + * @title: GstDataProtocol * @short_description: Serialization of caps, buffers and events. * @see_also: #GstCaps, #GstEvent, #GstBuffer * diff --git a/gst/gdp/gstgdpdepay.c b/gst/gdp/gstgdpdepay.c index b7269bd68..4d2093870 100644 --- a/gst/gdp/gstgdpdepay.c +++ b/gst/gdp/gstgdpdepay.c @@ -19,17 +19,17 @@ /** * SECTION:element-gdpdepay + * @title: gdpdepay * @see_also: gdppay * * This element depayloads GStreamer Data Protocol buffers back to deserialized * buffers and events. * - * <refsect2> * |[ * gst-launch-1.0 -v -m filesrc location=test.gdp ! gdpdepay ! xvimagesink * ]| This pipeline plays back a serialized video stream as created in the * example for gdppay. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/gdp/gstgdppay.c b/gst/gdp/gstgdppay.c index 6b55895b0..3546dd86a 100644 --- a/gst/gdp/gstgdppay.c +++ b/gst/gdp/gstgdppay.c @@ -20,17 +20,17 @@ /** * SECTION:element-gdppay + * @title: gdppay * @see_also: gdpdepay * * This element payloads GStreamer buffers and events using the * GStreamer Data Protocol. * - * <refsect2> * |[ * gst-launch-1.0 -v -m videotestsrc num-buffers=50 ! gdppay ! filesink location=test.gdp * ]| This pipeline creates a serialized video stream that can be played back * with the example shown in gdpdepay. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstbulge.c b/gst/geometrictransform/gstbulge.c index 98a7849ba..01e850b12 100644 --- a/gst/geometrictransform/gstbulge.c +++ b/gst/geometrictransform/gstbulge.c @@ -43,17 +43,17 @@ /** * SECTION:element-bulge + * @title: bulge * @see_also: geometrictransform * * Bugle is a geometric image transform element. It adds a protuberance in the * center point. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! bulge ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstcircle.c b/gst/geometrictransform/gstcircle.c index da4f48b0d..b326fd00b 100644 --- a/gst/geometrictransform/gstcircle.c +++ b/gst/geometrictransform/gstcircle.c @@ -49,17 +49,17 @@ /** * SECTION:element-circle + * @title: circle * @see_also: geometrictransform * * Circle is a geometric image transform element. It warps the picture into an * arc shaped form. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! circle ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstdiffuse.c b/gst/geometrictransform/gstdiffuse.c index 03009c724..36e4e62a2 100644 --- a/gst/geometrictransform/gstdiffuse.c +++ b/gst/geometrictransform/gstdiffuse.c @@ -49,17 +49,17 @@ /** * SECTION:element-diffuse + * @title: diffuse * @see_also: geometrictransform * * Diffuse is a geometric image transform element. It diffuses the image by * moving its pixels in random directions. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! diffuse ! videoconvert ! 
autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstfisheye.c b/gst/geometrictransform/gstfisheye.c index d08069c2c..7150e3844 100644 --- a/gst/geometrictransform/gstfisheye.c +++ b/gst/geometrictransform/gstfisheye.c @@ -43,17 +43,17 @@ /** * SECTION:element-fisheye + * @title: fisheye * @see_also: geometrictransform * * Fisheye is a geometric image transform element. It simulates a fisheye lens * by zooming on the center of the image and compressing the edges. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! fisheye ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstkaleidoscope.c b/gst/geometrictransform/gstkaleidoscope.c index bb1bdfcb8..00f988f72 100644 --- a/gst/geometrictransform/gstkaleidoscope.c +++ b/gst/geometrictransform/gstkaleidoscope.c @@ -49,17 +49,17 @@ /** * SECTION:element-kaleidoscope + * @title: kaleidoscope * @see_also: geometrictransform * * The kaleidscope element applies 'kaleidoscope' geometric transform to the * image. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! kaleidoscope ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstmarble.c b/gst/geometrictransform/gstmarble.c index 689845366..f508d2a51 100644 --- a/gst/geometrictransform/gstmarble.c +++ b/gst/geometrictransform/gstmarble.c @@ -49,17 +49,17 @@ /** * SECTION:element-marble + * @title: marble * @see_also: geometrictransform * * Marble is a geometric image transform element. It applies a marbling effect * to the image. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! marble ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstmirror.c b/gst/geometrictransform/gstmirror.c index df18234d4..31e0a69c1 100644 --- a/gst/geometrictransform/gstmirror.c +++ b/gst/geometrictransform/gstmirror.c @@ -43,17 +43,17 @@ /** * SECTION:element-mirror + * @title: mirror * @see_also: geometrictransform * * Mirror is a geometric transform element. It splits the image into two halves * and reflects one over each other. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! mirror ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstperspective.c b/gst/geometrictransform/gstperspective.c index bafa1fc5d..e4da32fca 100644 --- a/gst/geometrictransform/gstperspective.c +++ b/gst/geometrictransform/gstperspective.c @@ -49,16 +49,16 @@ /** * SECTION:element-perspective + * @title: perspective * @see_also: geometrictransform * * The perspective element applies a 2D perspective transform. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! perspective ! videoconvert ! 
autovideosink * ]| - * </refsect2> + * */ /* FIXME: suppress warnings for deprecated API such as GValueArray diff --git a/gst/geometrictransform/gstpinch.c b/gst/geometrictransform/gstpinch.c index d68dcd168..798b96586 100644 --- a/gst/geometrictransform/gstpinch.c +++ b/gst/geometrictransform/gstpinch.c @@ -49,16 +49,16 @@ /** * SECTION:element-pinch + * @title: pinch * @see_also: geometrictransform * * Pinch applies a 'pinch' geometric transform to the image. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! pinch ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstrotate.c b/gst/geometrictransform/gstrotate.c index 1e03339bf..d6f65152b 100644 --- a/gst/geometrictransform/gstrotate.c +++ b/gst/geometrictransform/gstrotate.c @@ -49,16 +49,16 @@ /** * SECTION:element-rotate + * @title: rotate * @see_also: geometrictransform * * The rotate element transforms the image by rotating it by a specified angle. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! rotate angle=0.78 ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstsphere.c b/gst/geometrictransform/gstsphere.c index 5f39a4765..90947a865 100644 --- a/gst/geometrictransform/gstsphere.c +++ b/gst/geometrictransform/gstsphere.c @@ -49,16 +49,16 @@ /** * SECTION:element-sphere + * @title: sphere * @see_also: geometrictransform * * The sphere element applies a 'sphere' geometric transform to the image. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! sphere ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstsquare.c b/gst/geometrictransform/gstsquare.c index 4b044f194..14271bd1e 100644 --- a/gst/geometrictransform/gstsquare.c +++ b/gst/geometrictransform/gstsquare.c @@ -43,16 +43,16 @@ /** * SECTION:element-square + * @title: square * @see_also: geometrictransform * * The square element distorts the center part of the image into a square. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! square zoom=100 ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gststretch.c b/gst/geometrictransform/gststretch.c index 61eed37f0..114a0cd22 100644 --- a/gst/geometrictransform/gststretch.c +++ b/gst/geometrictransform/gststretch.c @@ -43,16 +43,16 @@ /** * SECTION:element-stretch + * @title: stretch * @see_also: geometrictransform * * The stretch element stretches the image in a circle around the center point. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! stretch ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gsttunnel.c b/gst/geometrictransform/gsttunnel.c index 94b6070cc..8668a5282 100644 --- a/gst/geometrictransform/gsttunnel.c +++ b/gst/geometrictransform/gsttunnel.c @@ -43,17 +43,17 @@ /** * SECTION:element-tunnel + * @title: tunnel * @see_also: geometrictransform * * Tunnel is a geometric image transform element. It applies a light tunnel * effect. 
* - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! tunnel ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gsttwirl.c b/gst/geometrictransform/gsttwirl.c index 962d3ba6d..54ec46b9c 100644 --- a/gst/geometrictransform/gsttwirl.c +++ b/gst/geometrictransform/gsttwirl.c @@ -49,16 +49,16 @@ /** * SECTION:element-twirl + * @title: twirl * @see_also: geometrictransform * * The twirl element twists the image from the center out. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! twirl ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/geometrictransform/gstwaterripple.c b/gst/geometrictransform/gstwaterripple.c index 96af6e8d7..2d39df9db 100644 --- a/gst/geometrictransform/gstwaterripple.c +++ b/gst/geometrictransform/gstwaterripple.c @@ -49,16 +49,16 @@ /** * SECTION:element-waterripple + * @title: waterripple * @see_also: geometrictransform * * The waterripple element creates a water ripple effect on the image. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! waterripple ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/id3tag/gstid3mux.c b/gst/id3tag/gstid3mux.c index 37c7a4908..ccb23f91a 100644 --- a/gst/id3tag/gstid3mux.c +++ b/gst/id3tag/gstid3mux.c @@ -22,6 +22,7 @@ /** * SECTION:element-id3mux + * @title: id3mux * @see_also: #GstID3Demux, #GstTagSetter * * This element adds ID3v2 tags to the beginning of a stream, and ID3v1 tags @@ -34,8 +35,7 @@ * Tags sent by upstream elements will be picked up automatically (and merged * according to the merge mode set via the tag setter interface). * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=foo.ogg ! decodebin ! audioconvert ! id3mux ! filesink location=foo.mp3 * ]| A pipeline that transcodes a file from Ogg/Vorbis to mp3 format with @@ -44,7 +44,7 @@ * |[ * gst-launch-1.0 -m filesrc location=foo.mp3 ! id3demux ! fakesink silent=TRUE * ]| Verify that tags have been written. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/inter/gstinteraudiosink.c b/gst/inter/gstinteraudiosink.c index 19fc5d2e3..3c3063103 100644 --- a/gst/inter/gstinteraudiosink.c +++ b/gst/inter/gstinteraudiosink.c @@ -18,13 +18,13 @@ */ /** * SECTION:element-gstinteraudiosink + * @title: gstinteraudiosink * * The interaudiosink element is an audio sink element. It is used * in connection with a interaudiosrc element in a different pipeline, * similar to intervideosink and intervideosrc. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v audiotestsrc ! queue ! interaudiosink * ]| @@ -34,7 +34,7 @@ * audio. * See the gstintertest.c example in the gst-plugins-bad source code for * more details. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/inter/gstinteraudiosrc.c b/gst/inter/gstinteraudiosrc.c index 10581ba0d..886632e26 100644 --- a/gst/inter/gstinteraudiosrc.c +++ b/gst/inter/gstinteraudiosrc.c @@ -18,21 +18,21 @@ */ /** * SECTION:element-gstinteraudiosrc + * @title: gstinteraudiosrc * * The interaudiosrc element is an audio source element. It is used * in connection with a interaudiosink element in a different pipeline. 
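The interaudiosink/interaudiosrc docstrings above repeatedly point out that these elements need two pipelines inside one application and refer to the in-tree gstintertest.c example. As a rough, hedged sketch (an editor's addition, not that example), the pairing could be wired up as below, assuming both elements expose the usual shared "channel" property.

|[
  /* Editor's sketch, not from the patch: one process, two pipelines,
   * bridged through the inter elements' shared "channel" name. */
  GstElement *sender = gst_parse_launch (
      "audiotestsrc is-live=true ! interaudiosink channel=bridge", NULL);
  GstElement *receiver = gst_parse_launch (
      "interaudiosrc channel=bridge ! queue ! autoaudiosink", NULL);

  gst_element_set_state (sender, GST_STATE_PLAYING);
  gst_element_set_state (receiver, GST_STATE_PLAYING);
]|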
* - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v interaudiosrc ! queue ! autoaudiosink * ]| - * + * * The interaudiosrc element cannot be used effectively with gst-launch-1.0, * as it requires a second pipeline in the application to send audio. * See the gstintertest.c example in the gst-plugins-bad source code for * more details. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/inter/gstintersubsink.c b/gst/inter/gstintersubsink.c index 038acbbd0..62a627491 100644 --- a/gst/inter/gstintersubsink.c +++ b/gst/inter/gstintersubsink.c @@ -18,21 +18,21 @@ */ /** * SECTION:element-gstintersubsink + * @title: gstintersubsink * * The intersubsink element is a subtitle sink element. It is used * in connection with a intersubsrc element in a different pipeline. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v ... ! intersubsink * ]| - * + * * The intersubsink element cannot be used effectively with gst-launch-1.0, * as it requires a second pipeline in the application to send audio. * See the gstintertest.c example in the gst-plugins-bad source code for * more details. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/inter/gstintersubsrc.c b/gst/inter/gstintersubsrc.c index ac49265f7..2ca1a523a 100644 --- a/gst/inter/gstintersubsrc.c +++ b/gst/inter/gstintersubsrc.c @@ -18,22 +18,22 @@ */ /** * SECTION:element-gstintersubsrc + * @title: gstintersubsrc * * The intersubsrc element is a subtitle source element. It is used * in connection with a intersubsink element in a different pipeline, * similar to interaudiosink and interaudiosrc. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v intersubsrc ! kateenc ! oggmux ! filesink location=out.ogv * ]| - * + * * The intersubsrc element cannot be used effectively with gst-launch-1.0, * as it requires a second pipeline in the application to send subtitles. * See the gstintertest.c example in the gst-plugins-bad source code for * more details. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/inter/gstintervideosink.c b/gst/inter/gstintervideosink.c index edee11d86..b73a554c2 100644 --- a/gst/inter/gstintervideosink.c +++ b/gst/inter/gstintervideosink.c @@ -18,22 +18,22 @@ */ /** * SECTION:element-gstintervideosink + * @title: gstintervideosink * * The intervideosink element is a video sink element. It is used * in connection with an intervideosrc element in a different pipeline, * similar to interaudiosink and interaudiosrc. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! intervideosink * ]| - * + * * The intervideosink element cannot be used effectively with gst-launch-1.0, * as it requires a second pipeline in the application to send video to. * See the gstintertest.c example in the gst-plugins-bad source code for * more details. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/inter/gstintervideosrc.c b/gst/inter/gstintervideosrc.c index fd41d5091..1c20b4219 100644 --- a/gst/inter/gstintervideosrc.c +++ b/gst/inter/gstintervideosrc.c @@ -18,20 +18,20 @@ */ /** * SECTION:element-gstintervideosrc + * @title: gstintervideosrc * * The intervideosrc element is a video source element. It is used * in connection with a intervideosink element in a different pipeline, * similar to interaudiosink and interaudiosrc. 
* - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v intervideosrc ! queue ! xvimagesink * ]| - * + * * The intersubsrc element cannot be used effectively with gst-launch-1.0, * as it requires a second pipeline in the application to send subtitles. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/interlace/gstinterlace.c b/gst/interlace/gstinterlace.c index bf140b551..e0ff1aa4b 100644 --- a/gst/interlace/gstinterlace.c +++ b/gst/interlace/gstinterlace.c @@ -19,14 +19,14 @@ */ /** * SECTION:element-interlace + * @title: interlace * * The interlace element takes a non-interlaced raw video stream as input, * creates fields out of each frame, then combines fields into interlaced * frames to output as an interlaced video stream. It can also produce * telecined streams from progressive input. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc pattern=ball ! interlace ! xvimagesink * ]| @@ -35,7 +35,7 @@ * |[ * gst-launch-1.0 -v filesrc location=/path/to/file ! decodebin ! videorate ! * videoscale ! video/x-raw,format=\(string\)I420,width=720,height=480, - * framerate=60000/1001,pixel-aspect-ratio=11/10 ! + * framerate=60000/1001,pixel-aspect-ratio=11/10 ! * interlace top-field-first=false ! autovideosink * ]| * This pipeline converts a progressive video stream into an interlaced @@ -49,7 +49,7 @@ * This pipeline converts a 24 frames per second progressive film stream into a * 30000/1001 2:3:2:3... pattern telecined stream suitable for displaying film * content on NTSC. - * </refsect2> + * */ diff --git a/gst/ivtc/gstcombdetect.c b/gst/ivtc/gstcombdetect.c index 210d39cca..d55ab5b7c 100644 --- a/gst/ivtc/gstcombdetect.c +++ b/gst/ivtc/gstcombdetect.c @@ -18,18 +18,18 @@ */ /** * SECTION:element-gstcombdetect + * @title: gstcombdetect * * The combdetect element detects if combing artifacts are present in * a raw video stream, and if so, marks them with a zebra stripe * pattern. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=file.mov ! decodebin ! combdetect ! * xvimagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/ivtc/gstivtc.c b/gst/ivtc/gstivtc.c index 09b02a559..407fb2f30 100644 --- a/gst/ivtc/gstivtc.c +++ b/gst/ivtc/gstivtc.c @@ -19,13 +19,13 @@ */ /** * SECTION:element-gstivtc + * @title: gstivtc * * The ivtc element is an inverse telecine filter. It takes interlaced * video that was created from progressive content using a telecine * filter, and reconstructs the original progressive content. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc pattern=ball ! video/x-raw,framerate=24/1 ! * interlace ! @@ -36,7 +36,7 @@ * converts it to a 60 fields per second interlaced stream. Then the * stream is inversed telecine'd back to 24 fps, yielding approximately * the original videotestsrc content. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/jp2kdecimator/gstjp2kdecimator.c b/gst/jp2kdecimator/gstjp2kdecimator.c index 75ee74605..35c506cc9 100644 --- a/gst/jp2kdecimator/gstjp2kdecimator.c +++ b/gst/jp2kdecimator/gstjp2kdecimator.c @@ -19,11 +19,11 @@ */ /** * SECTION:element-gstjp2kdecimator + * @title: gstjp2kdecimator * * The jp2kdecimator element removes information from JPEG2000 images without reencoding. 
* - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc num-buffers=1 ! jp2kenc ! \ * gstjp2kdecimator max-decomposition-levels=2 ! jp2kdec ! \ @@ -31,7 +31,7 @@ * ]| * This pipelines encodes a test image to JPEG2000, only keeps 3 decomposition levels * decodes the decimated image again and shows it on the screen. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/jpegformat/gstjifmux.c b/gst/jpegformat/gstjifmux.c index b1ba175ec..517c994ec 100644 --- a/gst/jpegformat/gstjifmux.c +++ b/gst/jpegformat/gstjifmux.c @@ -22,20 +22,20 @@ /** * SECTION:element-jifmux + * @title: jifmux * @short_description: JPEG interchange format writer * * Writes a JPEG image as JPEG/EXIF or JPEG/JFIF including various metadata. The * jpeg image received on the sink pad should be minimal (e.g. should not * contain metadata already). * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc num-buffers=1 ! jpegenc ! jifmux ! filesink location=... * ]| * The above pipeline renders a frame, encodes to jpeg, adds metadata and writes * it to disk. - * </refsect2> + * */ /* jpeg interchange format: diff --git a/gst/jpegformat/gstjpegparse.c b/gst/jpegformat/gstjpegparse.c index 6978b57cf..dd684d79f 100644 --- a/gst/jpegformat/gstjpegparse.c +++ b/gst/jpegformat/gstjpegparse.c @@ -23,6 +23,7 @@ /** * SECTION:element-jpegparse + * @title: jpegparse * @short_description: JPEG parser * * Parses a JPEG stream into JPEG images. It looks for EOI boundaries to @@ -30,14 +31,13 @@ * image header searching for image properties such as width and height * among others. Jpegparse can also extract metadata (e.g. xmp). * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v souphttpsrc location=... ! jpegparse ! matroskamux ! filesink location=... * ]| * The above pipeline fetches a motion JPEG stream from an IP camera over * HTTP and stores it in a matroska file. - * </refsect2> + * */ /* FIXME: output plain JFIF APP marker only. This provides best code reuse. * JPEG decoders would not need to handle this part anymore. Also when remuxing diff --git a/gst/midi/midiparse.c b/gst/midi/midiparse.c index d58ea3f49..dc3c078ad 100644 --- a/gst/midi/midiparse.c +++ b/gst/midi/midiparse.c @@ -21,18 +21,18 @@ /** * SECTION:element-midiparse + * @title: midiparse * @see_also: fluiddec * * This element parses midi-files into midi events. You would need a midi * renderer such as fluidsynth to convert the events into raw samples. * - * <refsect2> - * <title>Example pipeline</title> + * ## Example pipeline * |[ * gst-launch-1.0 filesrc location=song.mid ! midiparse ! fluiddec ! pulsesink * ]| This example pipeline will parse the midi and render to raw audio which is * played via pulseaudio. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/mxf/mxfdemux.c b/gst/mxf/mxfdemux.c index 54ceec1df..5c176ac0e 100644 --- a/gst/mxf/mxfdemux.c +++ b/gst/mxf/mxfdemux.c @@ -19,15 +19,15 @@ /** * SECTION:element-mxfdemux + * @title: mxfdemux * * mxfdemux demuxes an MXF file into the different contained streams. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=/path/to/mxf ! mxfdemux ! audioconvert ! autoaudiosink * ]| This pipeline demuxes an MXF file and outputs one of the contained raw audio streams. 
- * </refsect2> + * */ /* TODO: diff --git a/gst/mxf/mxfmux.c b/gst/mxf/mxfmux.c index 7de12ac7a..4f6632107 100644 --- a/gst/mxf/mxfmux.c +++ b/gst/mxf/mxfmux.c @@ -19,15 +19,15 @@ /** * SECTION:element-mxfmux + * @title: mxfmux * * mxfmux muxes different streams into an MXF file. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=/path/to/audio ! decodebin ! queue ! mxfmux name=m ! filesink location=file.mxf filesrc location=/path/to/video ! decodebin ! queue ! m. * ]| This pipeline muxes an audio and video file into a single MXF file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/pcapparse/gstirtspparse.c b/gst/pcapparse/gstirtspparse.c index f6b7aceea..a1d5989ff 100644 --- a/gst/pcapparse/gstirtspparse.c +++ b/gst/pcapparse/gstirtspparse.c @@ -20,6 +20,7 @@ */ /** * SECTION:element-irtspparse + * @title: irtspparse * @short_description: Interleaved RTSP parser * @see_also: #GstPcapParse * @@ -27,14 +28,13 @@ * so-called "channels" from received interleaved (TCP) RTSP data * (typically extracted from some network capture). * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=h264crasher.pcap ! pcapparse ! irtspparse * ! rtph264depay ! ffdec_h264 ! fakesink * ]| Read from a pcap dump file using filesrc, extract the raw TCP packets, * depayload and decode them. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/pcapparse/gstpcapparse.c b/gst/pcapparse/gstpcapparse.c index 4ffa8c1ac..6bd293a75 100644 --- a/gst/pcapparse/gstpcapparse.c +++ b/gst/pcapparse/gstpcapparse.c @@ -19,20 +19,20 @@ /** * SECTION:element-pcapparse + * @title: pcapparse * * Extracts payloads from Ethernet-encapsulated IP packets. * Use #GstPcapParse:src-ip, #GstPcapParse:dst-ip, * #GstPcapParse:src-port and #GstPcapParse:dst-port to restrict which packets * should be included. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 filesrc location=h264crasher.pcap ! pcapparse ! rtph264depay * ! ffdec_h264 ! fakesink * ]| Read from a pcap dump file using filesrc, extract the raw UDP packets, * depayload and decode them. - * </refsect2> + * */ /* TODO: diff --git a/gst/pnm/gstpnmdec.c b/gst/pnm/gstpnmdec.c index 1b5ea5117..5b624bed4 100644 --- a/gst/pnm/gstpnmdec.c +++ b/gst/pnm/gstpnmdec.c @@ -19,15 +19,15 @@ /** * SECTION:element-pnmdec + * @title: pnmdec * * Decodes pnm images. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=test.pnm ! pnmdec ! videoconvert ! autovideosink * ]| The above pipeline reads a pnm file and renders it to the screen. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/pnm/gstpnmenc.c b/gst/pnm/gstpnmenc.c index 2c45191cd..267384733 100644 --- a/gst/pnm/gstpnmenc.c +++ b/gst/pnm/gstpnmenc.c @@ -19,17 +19,17 @@ /** * SECTION:element-pnmenc + * @title: pnmenc * * Encodes pnm images. This plugin supports both raw and ASCII encoding. * To enable ASCII encoding, set the parameter ascii to TRUE. If you omit * the parameter or set it to FALSE, the output will be raw encoded. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc num_buffers=1 ! videoconvert ! "video/x-raw,format=GRAY8" ! pnmenc ascii=true ! filesink location=test.pnm * ]| The above pipeline writes a test pnm file (ASCII encoding). 
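The pcapparse docstring earlier in this hunk mentions the src-ip, dst-ip, src-port and dst-port filter properties without showing them in use. The following is only an editorial illustration with placeholder address and port values, not an example from the commit.

|[
  /* Editor's illustration (not from the patch): keep only packets from one
   * source host to one destination port; values are placeholders. */
  GstElement *parse = gst_element_factory_make ("pcapparse", NULL);

  g_object_set (parse, "src-ip", "10.0.0.5", "dst-port", 5004, NULL);
]|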
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/rawparse/gstaudioparse.c b/gst/rawparse/gstaudioparse.c index 27884fd1c..e542d46d1 100644 --- a/gst/rawparse/gstaudioparse.c +++ b/gst/rawparse/gstaudioparse.c @@ -20,10 +20,11 @@ */ /** * SECTION:element-audioparse + * @title: audioparse * * Converts a byte stream into audio frames. * - * <note>This element is deprecated. Use #GstRawAudioParse instead.</note> + * This element is deprecated. Use #GstRawAudioParse instead. */ #ifdef HAVE_CONFIG_H diff --git a/gst/rawparse/gstvideoparse.c b/gst/rawparse/gstvideoparse.c index 9f5b19545..3a51ba1ce 100644 --- a/gst/rawparse/gstvideoparse.c +++ b/gst/rawparse/gstvideoparse.c @@ -21,10 +21,11 @@ */ /** * SECTION:element-videoparse + * @title: videoparse * * Converts a byte stream into video frames. * - * <note>This element is deprecated. Use #GstRawVideoParse instead.</note> + * > This element is deprecated. Use #GstRawVideoParse instead. */ #ifdef HAVE_CONFIG_H diff --git a/gst/removesilence/gstremovesilence.c b/gst/removesilence/gstremovesilence.c index 98456ff5c..3af56958e 100644 --- a/gst/removesilence/gstremovesilence.c +++ b/gst/removesilence/gstremovesilence.c @@ -20,15 +20,15 @@ /** * SECTION:element-removesilence + * @title: removesilence * * Removes all silence periods from an audio stream, dropping silence buffers. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v -m filesrc location="audiofile" ! decodebin ! removesilence remove=true ! wavenc ! filesink location=without_audio.wav * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/sdp/gstsdpdemux.c b/gst/sdp/gstsdpdemux.c index 6e176c0d2..4fc316d99 100644 --- a/gst/sdp/gstsdpdemux.c +++ b/gst/sdp/gstsdpdemux.c @@ -18,27 +18,27 @@ */ /** * SECTION:element-sdpdemux + * @title: sdpdemux * * sdpdemux currently understands SDP as the input format of the session description. * For each stream listed in the SDP a new stream_\%u pad will be created * with caps derived from the SDP media description. This is a caps of mime type * "application/x-rtp" that can be connected to any available RTP depayloader - * element. - * + * element. + * * sdpdemux will internally instantiate an RTP session manager element * that will handle the RTCP messages to and from the server, jitter removal, - * packet reordering along with providing a clock for the pipeline. - * - * sdpdemux acts like a live element and will therefore only generate data in the + * packet reordering along with providing a clock for the pipeline. + * + * sdpdemux acts like a live element and will therefore only generate data in the * PLAYING state. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 souphttpsrc location=http://some.server/session.sdp ! sdpdemux ! fakesink * ]| Establish a connection to an HTTP server that contains an SDP session description * that gets parsed by sdpdemux and send the raw RTP packets to a fakesink. 
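Because sdpdemux, as described above, only creates its stream_%u pads once the SDP has been parsed, applications normally link the depayloaders from a "pad-added" handler. The snippet below is an editorial sketch of that pattern (not from the commit), with the depayloader element left as a placeholder.

|[
  /* Editor's sketch, not from the commit: link a dynamically added
   * sdpdemux stream pad to a previously created depayloader. */
  static void
  on_pad_added (GstElement * demux, GstPad * newpad, gpointer user_data)
  {
    GstElement *depay = GST_ELEMENT (user_data);
    GstPad *sinkpad = gst_element_get_static_pad (depay, "sink");

    if (!gst_pad_is_linked (sinkpad))
      gst_pad_link (newpad, sinkpad);
    gst_object_unref (sinkpad);
  }

  /* ... g_signal_connect (sdpdemux, "pad-added",
   *         G_CALLBACK (on_pad_added), depay); ... */
]|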
- * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/siren/gstsirendec.c b/gst/siren/gstsirendec.c index 78338ae2b..045bcd668 100644 --- a/gst/siren/gstsirendec.c +++ b/gst/siren/gstsirendec.c @@ -21,6 +21,7 @@ */ /** * SECTION:element-sirendec + * @title: sirendec * * This decodes audio buffers from the Siren 16 codec (a 16khz extension of * G.722.1) that is meant to be compatible with the Microsoft Windows Live diff --git a/gst/siren/gstsirenenc.c b/gst/siren/gstsirenenc.c index 0be5cd370..b54f29c5e 100644 --- a/gst/siren/gstsirenenc.c +++ b/gst/siren/gstsirenenc.c @@ -21,6 +21,7 @@ */ /** * SECTION:element-sirenenc + * @title: sirenenc * * This encodes audio buffers into the Siren 16 codec (a 16khz extension of * G.722.1) that is meant to be compatible with the Microsoft Windows Live diff --git a/gst/speed/gstspeed.c b/gst/speed/gstspeed.c index 7518de28a..8d90518a8 100644 --- a/gst/speed/gstspeed.c +++ b/gst/speed/gstspeed.c @@ -20,20 +20,20 @@ /** * SECTION:element-speed + * @title: speed * * Plays an audio stream at a different speed (by resampling the audio). - * + * * Do not use this element. Either use the 'pitch' element, or do a seek with * a non-1.0 rate parameter, this will have the same effect as using the speed * element (but relies on the decoder/demuxer to handle this correctly, also * requires a fairly up-to-date gst-plugins-base, as of February 2007). - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 filesrc location=test.ogg ! decodebin ! audioconvert ! speed speed=1.5 ! audioconvert ! audioresample ! autoaudiosink * ]| Plays an .ogg file at 1.5x speed. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/stereo/gststereo.c b/gst/stereo/gststereo.c index 35990104a..72539d6e1 100644 --- a/gst/stereo/gststereo.c +++ b/gst/stereo/gststereo.c @@ -23,15 +23,15 @@ */ /** * SECTION:element-stereo + * @title: stereo * * Create a wide stereo effect. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=sine.ogg ! oggdemux ! vorbisdec ! audioconvert ! stereo ! audioconvert ! audioresample ! alsasink * ]| Play an Ogg/Vorbis file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/timecode/gstavwait.c b/gst/timecode/gstavwait.c index 780ee80df..9ee73ed57 100644 --- a/gst/timecode/gstavwait.c +++ b/gst/timecode/gstavwait.c @@ -23,6 +23,7 @@ /** * SECTION:element-avwait + * @title: avwait * * This element will drop all buffers until a specific timecode or running * time has been reached. It will then pass-through both audio and video, @@ -31,11 +32,10 @@ * the video). In the "audio-after-video" mode, it only drops audio buffers * until video has started. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 filesrc location="my_file" ! decodebin name=d ! "audio/x-raw" ! avwait name=l target-timecode-str="00:00:04:00" ! autoaudiosink d. ! "video/x-raw" ! timecodestamper ! l. l. ! queue ! timeoverlay time-mode=time-code ! 
autovideosink - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/timecode/gsttimecodestamper.c b/gst/timecode/gsttimecodestamper.c index 81860d0dc..b3c513ca0 100644 --- a/gst/timecode/gsttimecodestamper.c +++ b/gst/timecode/gsttimecodestamper.c @@ -22,18 +22,18 @@ /** * SECTION:element-timecodestamper + * @title: timecodestamper * @short_description: Attach a timecode into incoming video frames * * This element attaches a timecode into every incoming video frame. It starts * counting from the stream time of each segment start, which it converts into * a timecode. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! timecodestamper ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videofilters/gstscenechange.c b/gst/videofilters/gstscenechange.c index 5adf3ce40..4fef2ac2a 100644 --- a/gst/videofilters/gstscenechange.c +++ b/gst/videofilters/gstscenechange.c @@ -18,6 +18,7 @@ */ /** * SECTION:element-gstscenechange + * @title: gstscenechange * * The scenechange element detects scene changes (also known as shot * changes) in a video stream, and sends a signal when this occurs. @@ -32,13 +33,12 @@ * * The scenechange element does not work with compressed video. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=some_file.ogv ! decodebin ! * scenechange ! theoraenc ! fakesink * ]| - * </refsect2> + * */ /* * The algorithm used for scene change detection is a modification diff --git a/gst/videofilters/gstvideodiff.c b/gst/videofilters/gstvideodiff.c index 457ebd1d8..1181ad3c3 100644 --- a/gst/videofilters/gstvideodiff.c +++ b/gst/videofilters/gstvideodiff.c @@ -18,16 +18,16 @@ */ /** * SECTION:element-gstvideodiff + * @title: gstvideodiff * * The videodiff element highlights the difference between a frame and its * previous on the luma plane. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc pattern=ball ! videodiff ! videoconvert ! autovideosink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videofilters/gstzebrastripe.c b/gst/videofilters/gstzebrastripe.c index 81f58b874..983f02d52 100644 --- a/gst/videofilters/gstzebrastripe.c +++ b/gst/videofilters/gstzebrastripe.c @@ -18,6 +18,7 @@ */ /** * SECTION:element-gstzebrastripe + * @title: gstzebrastripe * * The zebrastripe element marks areas of images in a video stream * that are brighter than a threshold with a diagonal zebra stripe @@ -27,8 +28,7 @@ * threshold setting of 70 is often used to properly adjust skin * tones. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc ! zebrastripe ! xvimagesink * ]| @@ -39,7 +39,7 @@ * property setting can be calculated from IRE by using the formula * percent = (IRE * 1.075) - 7.5. Note that 100 IRE corresponds to * 100 %, and 70 IRE corresponds to 68 %. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videoframe_audiolevel/gstvideoframe-audiolevel.c b/gst/videoframe_audiolevel/gstvideoframe-audiolevel.c index 46fda779e..bd5b43db5 100644 --- a/gst/videoframe_audiolevel/gstvideoframe-audiolevel.c +++ b/gst/videoframe_audiolevel/gstvideoframe-audiolevel.c @@ -24,16 +24,16 @@ /** * SECTION:element-videoframe-audiolevel + * @title: videoframe-audiolevel * * This element acts like a synchronized audio/video "level". 
It gathers * all audio buffers sent between two video frames, and then sends a message * that contains the RMS value of all samples for these buffers. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -m filesrc location="file.mkv" ! decodebin name=d ! "audio/x-raw" ! videoframe-audiolevel name=l ! autoaudiosink d. ! "video/x-raw" ! l. l. ! queue ! autovideosink ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videoparsers/gstdiracparse.c b/gst/videoparsers/gstdiracparse.c index c674cb835..219f584a7 100644 --- a/gst/videoparsers/gstdiracparse.c +++ b/gst/videoparsers/gstdiracparse.c @@ -18,16 +18,16 @@ */ /** * SECTION:element-gstdiracparse + * @title: gstdiracparse * * The gstdiracparse element does FIXME stuff. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v fakesrc ! gstdiracparse ! FIXME ! fakesink * ]| * FIXME Describe what the pipeline does. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videosignal/gstsimplevideomark.c b/gst/videosignal/gstsimplevideomark.c index 2a527c32e..ae4d0c02a 100644 --- a/gst/videosignal/gstsimplevideomark.c +++ b/gst/videosignal/gstsimplevideomark.c @@ -18,27 +18,27 @@ */ /** * SECTION:element-simplevideomark + * @title: simplevideomark * @see_also: #GstVideoDetect * * This plugin produces #GstSimpleVideoMark::pattern-count squares in the bottom left - * corner of the video frames. The squares have a width and height of + * corner of the video frames. The squares have a width and height of * respectively #GstSimpleVideoMark:pattern-width and #GstSimpleVideoMark:pattern-height. * Even squares will be black and odd squares will be white. - * + * * After writing the pattern, #GstSimpleVideoMark:pattern-data-count squares after the * pattern squares are produced as the bitarray given in * #GstSimpleVideoMark:pattern-data. 1 bits will produce white squares and 0 bits will * produce black squares. - * + * * The element can be enabled with the #GstSimpleVideoMark:enabled property. It is * mostly used together with the #GstVideoDetect plugin. - * - * <refsect2> - * <title>Example launch line</title> + * + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! simplevideomark ! videoconvert ! ximagesink * ]| Add the default black/white squares at the bottom left of the video frames. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videosignal/gstsimplevideomarkdetect.c b/gst/videosignal/gstsimplevideomarkdetect.c index 1e95c8155..d58cff2e2 100644 --- a/gst/videosignal/gstsimplevideomarkdetect.c +++ b/gst/videosignal/gstsimplevideomarkdetect.c @@ -18,78 +18,46 @@ */ /** * SECTION:element-simplevideomarkdetect + * @title: simplevideomarkdetect * @see_also: #GstVideoMark * * This plugin detects #GstSimpleVideoMarkDetect:pattern-count squares in the bottom left * corner of the video frames. The squares have a width and height of * respectively #GstSimpleVideoMarkDetect:pattern-width and #GstSimpleVideoMarkDetect:pattern-height. * Even squares must be black and odd squares must be white. - * + * * When the pattern has been found, #GstSimpleVideoMarkDetect:pattern-data-count squares * after the pattern squares are read as a bitarray. White squares represent a 1 * bit and black squares a 0 bit. The bitarray will will included in the element * message that is posted (see below). 
- * + * * After the pattern has been found and the data pattern has been read, an - * element message called <classname>"GstSimpleVideoMarkDetect"</classname> will + * element message called `GstSimpleVideoMarkDetect` will * be posted on the bus. If the pattern is no longer found in the frame, the * same element message is posted with the have-pattern field set to #FALSE. * The message is only posted if the #GstSimpleVideoMarkDetect:message property is #TRUE. - * + * * The message's structure contains these fields: - * <itemizedlist> - * <listitem> - * <para> - * #gboolean - * <classname>"have-pattern"</classname>: - * if the pattern was found. This field will be set to #TRUE for as long as + * + * * #gboolean`have-pattern`: if the pattern was found. This field will be set to #TRUE for as long as * the pattern was found in the frame and set to FALSE for the first frame * that does not contain the pattern anymore. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"timestamp"</classname>: - * the timestamp of the buffer that triggered the message. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"stream-time"</classname>: - * the stream time of the buffer. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"running-time"</classname>: - * the running_time of the buffer. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"duration"</classname>: - * the duration of the buffer. - * </para> - * </listitem> - * <listitem> - * <para> - * #guint64 - * <classname>"data"</classname>: - * the data-pattern found after the pattern or 0 when have-signal is #FALSE. - * </para> - * </listitem> - * </itemizedlist> - * - * <refsect2> - * <title>Example launch line</title> + * + * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message. + * + * * #GstClockTime `stream-time`: the stream time of the buffer. + * + * * #GstClockTime `running-time`: the running_time of the buffer. + * + * * #GstClockTime `duration`: the duration of the buffer. + * + * * #guint64 `data`: the data-pattern found after the pattern or 0 when have-signal is #FALSE. + * + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! simplevideomarkdetect ! videoconvert ! ximagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/videosignal/gstvideoanalyse.c b/gst/videosignal/gstvideoanalyse.c index ea52bdf7d..2723c14fd 100644 --- a/gst/videosignal/gstvideoanalyse.c +++ b/gst/videosignal/gstvideoanalyse.c @@ -18,63 +18,31 @@ */ /** * SECTION:element-videoanalyse + * @title: videoanalyse * * This plugin analyses every video frame and if the #GstVideoAnalyse:message * property is #TRUE, posts an element message with video statistics called - * <classname>"GstVideoAnalyse"</classname>. + * `GstVideoAnalyse`. * * The message's structure contains these fields: - * <itemizedlist> - * <listitem> - * <para> - * #GstClockTime - * <classname>"timestamp"</classname>: - * the timestamp of the buffer that triggered the message. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"stream-time"</classname>: - * the stream time of the buffer. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"running-time"</classname>: - * the running_time of the buffer. - * </para> - * </listitem> - * <listitem> - * <para> - * #GstClockTime - * <classname>"duration"</classname>: - * the duration of the buffer. 
- * </para> - * </listitem> - * <listitem> - * <para> - * #gdouble - * <classname>"luma-average"</classname>: - * the average brightness of the frame. Range: 0.0-1.0 - * </para> - * </listitem> - * <listitem> - * <para> - * #gdouble - * <classname>"luma-variance"</classname>: - * the brightness variance of the frame. - * </para> - * </listitem> - * </itemizedlist> - * - * <refsect2> - * <title>Example launch line</title> + * + * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message. + * + * * #GstClockTime `stream-time`: the stream time of the buffer. + * + * * #GstClockTime `running-time`: the running_time of the buffer. + * + * * #GstClockTime`duration`:the duration of the buffer. + * + * * #gdouble`luma-average`: the average brightness of the frame. Range: 0.0-1.0 + * + * * #gdouble`luma-variance`: the brightness variance of the frame. + * + * ## Example launch line * |[ * gst-launch-1.0 -m videotestsrc ! videoanalyse ! videoconvert ! ximagesink - * ]| This pipeline emits messages to the console for each frame that has been analysed. - * </refsect2> + * ]| This pipeline emits messages to the console for each frame that has been analysed. + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/y4m/gsty4mdec.c b/gst/y4m/gsty4mdec.c index 30966b0cc..687138154 100644 --- a/gst/y4m/gsty4mdec.c +++ b/gst/y4m/gsty4mdec.c @@ -18,15 +18,15 @@ */ /** * SECTION:element-gsty4mdec + * @title: gsty4mdec * * The gsty4mdec element decodes uncompressed video in YUV4MPEG format. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=file.y4m ! y4mdec ! xvimagesink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/gst/yadif/gstyadif.c b/gst/yadif/gstyadif.c index 2611c644f..0056f7845 100644 --- a/gst/yadif/gstyadif.c +++ b/gst/yadif/gstyadif.c @@ -18,7 +18,8 @@ * Boston, MA 02110-1335, USA. */ /** - * SECTION:element-gstyadif + * SECTION:element-yadif + * @title: yadif * * The yadif element deinterlaces video, using the YADIF deinterlacing * filter copied from Libav. This element only handles the simple case @@ -26,14 +27,13 @@ * inverse telecine and deinterlace cases that are handled by the * deinterlace element. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v videotestsrc pattern=ball ! interlace ! yadif ! xvimagesink * ]| * This pipeline creates an interlaced test pattern, and then deinterlaces * it using the yadif filter. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/androidmedia/gstahcsrc.c b/sys/androidmedia/gstahcsrc.c index 2fa1d6f24..cb23be2e8 100644 --- a/sys/androidmedia/gstahcsrc.c +++ b/sys/androidmedia/gstahcsrc.c @@ -21,6 +21,7 @@ /** * SECTION:element-ahcsrc + * @title: ahcsrc * * ahcsrc can be used to capture video from android devices. It uses the * android.hardware.Camera Java API to capture from the system's cameras. @@ -31,7 +32,7 @@ * so it can be loaded into the virtual machine. * In order for it to work, an environment variable must be set to a writable * directory. - * The source will look for the environment variable “TMP” which must contain + * The source will look for the environment variable “TMPâ€� which must contain * the absolute path to a writable directory. * It can be retreived using the following Java code : * |[ @@ -40,28 +41,24 @@ * Where the @context variable is an object of type android.content.Context * (including its subclasses android.app.Activity or android.app.Application). 
* Another optional environment variable can be set for pointing to the - * optimized dex classes directory. If the environment variable “DEX” is - * available, it will be used, otherwise, the directory in the “TMP” environment + * optimized dex classes directory. If the environment variable “DEX” is + * available, it will be used, otherwise, the directory in the “TMP” environment * variable will be used for the optimized dex directory. * The system dex directory can be obtained using the following Java code : * |[ - * context.getDir(“dex”, 0).getAbsolutePath(); + * context.getDir("dex", 0).getAbsolutePath(); * ]| * - * <note> - * Those environment variable must be set before gst_init is called from - * the native code. - * </note> + * > Those environment variable must be set before gst_init is called from + * > the native code. * - * <note> - * If the “TMP” environment variable is not available or the directory is not - * writable or any other issue happens while trying to load the embedded jar - * file, then the source will fallback on trying to load the class directly - * from the running application. - * The file com/gstreamer/GstAhcCallback.java in the source's directory can be - * copied into the Android application so it can be loaded at runtime - * as a fallback mechanism. - * </note> + * > If the "TMP" environment variable is not available or the directory is not + * > writable or any other issue happens while trying to load the embedded jar + * > file, then the source will fallback on trying to load the class directly + * > from the running application. + * > The file com/gstreamer/GstAhcCallback.java in the source's directory can be + * > copied into the Android application so it can be loaded at runtime + * > as a fallback mechanism. * */ diff --git a/sys/androidmedia/gstahssrc.c b/sys/androidmedia/gstahssrc.c index 88f8282d6..c71637833 100644 --- a/sys/androidmedia/gstahssrc.c +++ b/sys/androidmedia/gstahssrc.c @@ -18,18 +18,18 @@ * Boston, MA 02110-1301, USA. */ /** - * SECTION:element-gstahssrc + * SECTION:element-ahssrc + * @title: gstahssrc * * The ahssrc element reads data from Android device sensors * (android.hardware.Sensor). * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch -v ahssrc ! fakesink * ]| * Push Android sensor data into a fakesink. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/applemedia/atdec.c b/sys/applemedia/atdec.c index b14891fd8..a1a2369b7 100644 --- a/sys/applemedia/atdec.c +++ b/sys/applemedia/atdec.c @@ -17,17 +17,17 @@ * Boston, MA 02110-1335, USA. */ /** - * SECTION:element-gstatdec + * SECTION:element-atdec + * @title: atdec * * AudioToolbox based decoder. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=file.mov ! qtdemux ! queue ! aacparse ! atdec ! autoaudiosink * ]| * Decode aac audio from a mov file - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/applemedia/vtdec.c b/sys/applemedia/vtdec.c index 472e39eb4..d9c943d6a 100644 --- a/sys/applemedia/vtdec.c +++ b/sys/applemedia/vtdec.c @@ -18,17 +18,17 @@ * Boston, MA 02110-1335, USA. */ /** - * SECTION:element-gstvtdec + * SECTION:element-vtdec + * @title: gstvtdec * * Apple VideoToolbox based decoder. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v filesrc location=file.mov ! qtdemux ! queue ! h264parse ! vtdec ! videoconvert !
autovideosink * ]| * Decode h264 video from a mov file. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/d3dvideosink/d3dhelpers.c b/sys/d3dvideosink/d3dhelpers.c index 9f391a653..9e7f95aaf 100644 --- a/sys/d3dvideosink/d3dhelpers.c +++ b/sys/d3dvideosink/d3dhelpers.c @@ -36,7 +36,7 @@ typedef enum WINDOW_VISIBILITY_ERROR = 4 } WindowHandleVisibility; -/** FWD DECLS **/ +/* FWD DECLS */ static gboolean d3d_hidden_window_thread (GstD3DVideoSinkClass * klass); static gboolean d3d_window_wndproc_set (GstD3DVideoSink * sink); @@ -68,7 +68,7 @@ static gint WM_D3DVIDEO_NOTIFY_DEVICE_LOST = 0; #define WM_QUIT_THREAD WM_USER+0 -/** Helpers **/ +/* Helpers */ #define ERROR_CHECK_HR(hr) \ if(hr != S_OK) { \ @@ -119,7 +119,7 @@ static gint WM_D3DVIDEO_NOTIFY_DEVICE_LOST = 0; #define D3DFMT_NV12 MAKEFOURCC ('N', 'V', '1', '2') #endif -/** FORMATS **/ +/* FORMATS */ #define CASE(x) case x: return #x; static const gchar * @@ -978,7 +978,7 @@ end: return ret; } -/** Windows for rendering (User Set or Internal) **/ +/* Windows for rendering (User Set or Internal) */ static void d3d_window_wndproc_unset (GstD3DVideoSink * sink) @@ -1208,7 +1208,7 @@ d3d_stop (GstD3DVideoSink * sink) return TRUE; } -/** D3D Lost and Reset Device **/ +/* D3D Lost and Reset Device */ static void d3d_notify_device_lost (GstD3DVideoSink * sink) @@ -1256,7 +1256,7 @@ d3d_notify_device_reset (GstD3DVideoSink * sink) UNLOCK_SINK (sink); } -/** Swap Chains **/ +/* Swap Chains */ static gboolean d3d_init_swap_chain (GstD3DVideoSink * sink, HWND hWnd) @@ -1988,7 +1988,7 @@ end: } -/** D3D Window Proc Functions **/ +/* D3D Window Proc Functions */ static LRESULT APIENTRY d3d_wnd_proc (HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) @@ -2110,7 +2110,7 @@ end: return ret; } -/** Internal Window **/ +/* Internal Window */ static LRESULT APIENTRY d3d_wnd_proc_internal (HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) @@ -2269,7 +2269,7 @@ d3d_create_internal_window (GstD3DVideoSink * sink) return dat.hWnd; } -/*** D3D Video Class Methdos ***/ +/* D3D Video Class Methdos */ gboolean d3d_class_init (GstD3DVideoSink * sink) @@ -2590,7 +2590,7 @@ end:; UNLOCK_CLASS (NULL, klass); } -/** Hidden Window Loop Thread **/ +/* Hidden Window Loop Thread */ static LRESULT APIENTRY D3DHiddenWndProc (HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) diff --git a/sys/d3dvideosink/d3dvideosink.c b/sys/d3dvideosink/d3dvideosink.c index 88e201edc..74920d78c 100644 --- a/sys/d3dvideosink/d3dvideosink.c +++ b/sys/d3dvideosink/d3dvideosink.c @@ -54,7 +54,7 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", GST_DEBUG_CATEGORY (gst_d3dvideosink_debug); #define GST_CAT_DEFAULT gst_d3dvideosink_debug -/** FWD DECLS **/ +/* FWD DECLS */ /* GstXOverlay Interface */ static void gst_d3dvideosink_video_overlay_interface_init (GstVideoOverlayInterface * @@ -176,7 +176,7 @@ gst_d3dvideosink_init (GstD3DVideoSink * sink) g_rec_mutex_init (&sink->lock); } -/** GObject Functions **/ +/* GObject Functions */ static void gst_d3dvideosink_finalize (GObject * gobject) @@ -245,7 +245,7 @@ gst_d3dvideosink_get_property (GObject * object, guint prop_id, GValue * value, } } -/** GstBaseSinkClass Functions **/ +/* GstBaseSinkClass Functions */ static GstCaps * gst_d3dvideosink_get_caps (GstBaseSink * basesink, GstCaps * filter) @@ -525,7 +525,7 @@ gst_d3dvideosink_propose_allocation (GstBaseSink * bsink, GstQuery * query) return TRUE; } -/** PUBLIC FUNCTIONS **/ +/* PUBLIC FUNCTIONS */ /* Iterface Registrations */ @@ -602,7 
+602,7 @@ gst_d3dvideosink_navigation_send_event (GstNavigation * navigation, } } -/** PRIVATE FUNCTIONS **/ +/* PRIVATE FUNCTIONS */ /* Plugin entry point */ diff --git a/sys/directsound/gstdirectsoundsrc.c b/sys/directsound/gstdirectsoundsrc.c index 397746641..e4f5b1a7f 100644 --- a/sys/directsound/gstdirectsoundsrc.c +++ b/sys/directsound/gstdirectsoundsrc.c @@ -50,15 +50,15 @@ /** * SECTION:element-directsoundsrc + * @title: directsoundsrc * * Reads audio data using the DirectSound API. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v directsoundsrc ! audioconvert ! vorbisenc ! oggmux ! filesink location=dsound.ogg * ]| Record from DirectSound and encode to Ogg/Vorbis. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/dvb/gstdvbsrc.c b/sys/dvb/gstdvbsrc.c index da55bd2a9..7e3f1e2fa 100644 --- a/sys/dvb/gstdvbsrc.c +++ b/sys/dvb/gstdvbsrc.c @@ -21,12 +21,12 @@ */ /** * SECTION:element-dvbsrc + * @title: dvbsrc * * dvbsrc can be used to capture media from DVB cards. Supported DTV * broadcasting standards include DVB-T/C/S, ATSC, ISDB-T and DTMB. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 dvbsrc modulation="QAM 64" trans-mode=8k bandwidth=8 frequency=514000000 code-rate-lp=AUTO code-rate-hp=2/3 guard=4 hierarchy=0 ! mpegtsdemux name=demux ! queue max-size-buffers=0 max-size-time=0 ! mpegvideoparse ! mpegvideoparse ! mpeg2dec ! xvimagesink demux. ! queue max-size-buffers=0 max-size-time=0 ! mpegaudioparse ! mpg123audiodec ! audioconvert ! pulsesink * ]| Captures a full transport stream from DVB card 0 that is a DVB-T card at tuned frequency 514000000 Hz with other parameters as seen in the pipeline and renders the first TV program on the transport stream. @@ -42,7 +42,7 @@ * |[ * gst-launch-1.0 dvbsrc frequency=503000000 delsys="atsc" modulation="8vsb" pids=48:49:52 ! decodebin name=dec dec. ! videoconvert ! autovideosink dec. ! audioconvert ! autoaudiosink * ]| Captures and renders KOFY-HD in San Jose, California. This is an ATSC broadcast, PMT ID 48, Audio/Video elementary stream PIDs 49 and 52 respectively. - * </refsect2> + * */ /* diff --git a/sys/kms/gstkmssink.c b/sys/kms/gstkmssink.c index 82bb6391c..8a9ff365d 100644 --- a/sys/kms/gstkmssink.c +++ b/sys/kms/gstkmssink.c @@ -25,17 +25,17 @@ /** * SECTION:element-kmssink + * @title: kmssink * @short_description: A KMS/DRM based video sink * * kmssink is a simple video sink that renders video frames directly * in a plane of a DRM device. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 videotestsrc ! kmssink * ]| - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/opensles/openslessink.c b/sys/opensles/openslessink.c index 24938539a..c4c049208 100644 --- a/sys/opensles/openslessink.c +++ b/sys/opensles/openslessink.c @@ -19,16 +19,15 @@ /** * SECTION:element-openslessink + * @title: openslessink * @see_also: openslessrc * * This element renders raw audio samples using the OpenSL ES API in Android OS. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v filesrc location=music.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioresample ! opeslessink * ]| Play an Ogg/Vorbis file. 
- * </refsect2> * */ diff --git a/sys/opensles/openslessrc.c b/sys/opensles/openslessrc.c index f32984ab1..66c3031df 100644 --- a/sys/opensles/openslessrc.c +++ b/sys/opensles/openslessrc.c @@ -19,16 +19,15 @@ /** * SECTION:element-openslessrc + * @title: openslessrc * @see_also: openslessink * * This element reads data from default audio input using the OpenSL ES API in Android OS. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v openslessrc ! audioconvert ! vorbisenc ! oggmux ! filesink location=recorded.ogg * ]| Record from default audio input and encode to Ogg/Vorbis. - * </refsect2> * */ diff --git a/sys/shm/gstshmsink.c b/sys/shm/gstshmsink.c index 8fcb9596a..b2e7dbeee 100644 --- a/sys/shm/gstshmsink.c +++ b/sys/shm/gstshmsink.c @@ -20,17 +20,17 @@ */ /** * SECTION:element-shmsink + * @title: shmsink * * Send data over shared memory to the matching source. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 -v videotestsrc ! "video/x-raw, format=YUY2, color-matrix=sdtv, \ * chroma-site=mpeg2, width=(int)320, height=(int)240, framerate=(fraction)30/1" \ * ! shmsink socket-path=/tmp/blah shm-size=2000000 * ]| Send video to shm buffers. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H #include "config.h" diff --git a/sys/shm/gstshmsrc.c b/sys/shm/gstshmsrc.c index 86955602a..068a34849 100644 --- a/sys/shm/gstshmsrc.c +++ b/sys/shm/gstshmsrc.c @@ -20,18 +20,18 @@ */ /** * SECTION:element-shmsrc + * @title: shmsrc * * Receive data from the shared memory sink. * - * <refsect2> - * <title>Example launch lines</title> + * ## Example launch lines * |[ * gst-launch-1.0 shmsrc socket-path=/tmp/blah ! \ * "video/x-raw, format=YUY2, color-matrix=sdtv, \ * chroma-site=mpeg2, width=(int)320, height=(int)240, framerate=(fraction)30/1" \ * ! queue ! videoconvert ! autovideosink * ]| Render video from shm buffers. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/tinyalsa/tinyalsasink.c b/sys/tinyalsa/tinyalsasink.c index 7c7799cea..7d6bf804b 100644 --- a/sys/tinyalsa/tinyalsasink.c +++ b/sys/tinyalsa/tinyalsasink.c @@ -20,18 +20,18 @@ /** * SECTION:element-tinyalsasink + * @title: tinyalsasink * @see_also: alsasink * * This element renders raw audio samples using the ALSA audio API via the * tinyalsa library. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v uridecodebin uri=file:///path/to/audio.ogg ! audioconvert ! audioresample ! tinyalsasink * ]| Play an Ogg/Vorbis file and output audio via ALSA using the tinyalsa * library. 
- * </refsect2> + * */ #include <gst/audio/gstaudiobasesink.h> diff --git a/sys/uvch264/gstuvch264_mjpgdemux.c b/sys/uvch264/gstuvch264_mjpgdemux.c index e253a0429..e90ac6fbe 100644 --- a/sys/uvch264/gstuvch264_mjpgdemux.c +++ b/sys/uvch264/gstuvch264_mjpgdemux.c @@ -23,6 +23,7 @@ /** * SECTION:element-uvch264mjpgdemux + * @title: uvch264mjpgdemux * @short_description: UVC H264 compliant MJPG demuxer * * Parses a MJPG stream from a UVC H264 compliant encoding camera and extracts diff --git a/sys/uvch264/gstuvch264_src.c b/sys/uvch264/gstuvch264_src.c index 4f2a6f304..00bbad691 100644 --- a/sys/uvch264/gstuvch264_src.c +++ b/sys/uvch264/gstuvch264_src.c @@ -23,6 +23,7 @@ /** * SECTION:element-uvch264-src + * @title: uvch264-src * * A camera bin src element that wraps v4l2src and implements UVC H264 * Extension Units (XU) to control the H264 encoder in the camera diff --git a/sys/vdpau/gstvdpsink.h b/sys/vdpau/gstvdpsink.h index e5e7e9405..04e3bd588 100644 --- a/sys/vdpau/gstvdpsink.h +++ b/sys/vdpau/gstvdpsink.h @@ -86,7 +86,7 @@ struct _GstVdpWindow { * @flow_lock: used to protect data flow routines from external calls such as * events from @event_thread or methods from the #GstXOverlay interface * @par: used to override calculated pixel aspect ratio from @xcontext - * @synchronous: used to store if XSynchronous should be used or not (for + * @synchronous: used to store if XSynchronous should be used or not (for * debugging purpose only) * @handle_events: used to know if we should handle select XEvents or not * diff --git a/sys/vdpau/gstvdpvideomemory.c b/sys/vdpau/gstvdpvideomemory.c index 8217653d2..92aff573a 100644 --- a/sys/vdpau/gstvdpvideomemory.c +++ b/sys/vdpau/gstvdpvideomemory.c @@ -231,7 +231,7 @@ _vdp_video_mem_free (GstAllocator * allocator, GstMemory * mem) * gst_vdp_video_memory_alloc: * @device: a #GstVdpDevice * @info: the #GstVideoInfo describing the format to use - * + * * Returns: a GstMemory object with a VdpVideoSurface specified by @info * from @device */ diff --git a/sys/vdpau/gstvdpvideopostprocess.c b/sys/vdpau/gstvdpvideopostprocess.c index 7f4f480a4..e2032f4f6 100644 --- a/sys/vdpau/gstvdpvideopostprocess.c +++ b/sys/vdpau/gstvdpvideopostprocess.c @@ -20,15 +20,15 @@ /** * SECTION:element-vdpauvideopostprocess + * @title: vdpauvideopostprocess * * FIXME:Describe vdpaumpegdec here. * - * <refsect2> - * <title>Example launch line</title> + * ## Example launch line * |[ * gst-launch-1.0 -v -m fakesrc ! vdpauvideopostprocess ! fakesink silent=TRUE * ]| - * </refsect2> + * */ /* diff --git a/sys/wasapi/gstwasapisink.c b/sys/wasapi/gstwasapisink.c index 5b35b8e1e..fac3cb1ba 100644 --- a/sys/wasapi/gstwasapisink.c +++ b/sys/wasapi/gstwasapisink.c @@ -21,16 +21,16 @@ /** * SECTION:element-wasapisink + * @title: wasapisink * * Provides audio playback using the Windows Audio Session API available with * Vista and newer. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v audiotestsrc samplesperbuffer=160 ! wasapisink * ]| Generate 20 ms buffers and render to the default audio device. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H # include <config.h> diff --git a/sys/wasapi/gstwasapisrc.c b/sys/wasapi/gstwasapisrc.c index 20c392964..0fdec2b8e 100644 --- a/sys/wasapi/gstwasapisrc.c +++ b/sys/wasapi/gstwasapisrc.c @@ -19,16 +19,16 @@ /** * SECTION:element-wasapisrc + * @title: wasapisrc * * Provides audio capture from the Windows Audio Session API available with * Vista and newer. 
* - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v wasapisrc ! fakesink * ]| Capture from the default audio device and render to fakesink. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H # include <config.h> diff --git a/sys/winks/gstksvideosrc.c b/sys/winks/gstksvideosrc.c index 0854b2940..f957aced4 100644 --- a/sys/winks/gstksvideosrc.c +++ b/sys/winks/gstksvideosrc.c @@ -20,11 +20,11 @@ /** * SECTION:element-ksvideosrc + * @title: ksvideosrc * * Provides low-latency video capture from WDM cameras on Windows. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 -v ksvideosrc do-stats=TRUE ! videoconvert ! dshowvideosink * ]| Capture from a camera and render using dshowvideosink. @@ -32,7 +32,7 @@ * gst-launch-1.0 -v ksvideosrc do-stats=TRUE ! image/jpeg, width=640, height=480 * ! jpegdec ! videoconvert ! dshowvideosink * ]| Capture from an MJPEG camera and render using dshowvideosink. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/winscreencap/gstdx9screencapsrc.c b/sys/winscreencap/gstdx9screencapsrc.c index 0e2cfb241..e7ee42984 100644 --- a/sys/winscreencap/gstdx9screencapsrc.c +++ b/sys/winscreencap/gstdx9screencapsrc.c @@ -19,6 +19,7 @@ /** * SECTION:element-dx9screencapsrc + * @title: dx9screencapsrc * * This element uses DirectX to capture the desktop or a portion of it. * The default is capturing the whole desktop, but #GstDX9ScreenCapSrc:x, @@ -27,8 +28,7 @@ * Use #GstDX9ScreenCapSrc:monitor for changing which monitor to capture * from. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 dx9screencapsrc ! videoconvert ! dshowvideosink * ]| Capture the desktop and display it. @@ -36,7 +36,7 @@ * gst-launch-1.0 dx9screencapsrc x=100 y=100 width=320 height=240 ! * videoconvert ! dshowvideosink * ]| Capture a portion of the desktop and display it. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H diff --git a/sys/winscreencap/gstgdiscreencapsrc.c b/sys/winscreencap/gstgdiscreencapsrc.c index 6476ed4f3..1ee0400d2 100644 --- a/sys/winscreencap/gstgdiscreencapsrc.c +++ b/sys/winscreencap/gstgdiscreencapsrc.c @@ -19,6 +19,7 @@ /** * SECTION:element-gdiscreencapsrc + * @title: gdiscreencapsrc * * This element uses GDI to capture the desktop or a portion of it. * The default is capturing the whole desktop, but #GstGDIScreenCapSrc:x, @@ -29,8 +30,7 @@ * * Set #GstGDIScreenCapSrc:cursor to TRUE to include the mouse cursor. * - * <refsect2> - * <title>Example pipelines</title> + * ## Example pipelines * |[ * gst-launch-1.0 gdiscreencapsrc ! videoconvert ! dshowvideosink * ]| Capture the desktop and display it. @@ -39,7 +39,7 @@ * ! videoconvert ! dshowvideosink * ]| Capture a portion of the desktop, including the mouse cursor, and * display it. - * </refsect2> + * */ #ifdef HAVE_CONFIG_H |
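Several of the sections ported above (videoanalyse, simplevideomarkdetect) document element messages and their fields. A minimal sketch, not part of this commit, of how an application reads one of them from the bus; it assumes the "GstVideoAnalyse" message and the "message" property behave as documented above, and the pipeline string mirrors the example launch line:

#include <gst/gst.h>

static gboolean
on_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    /* Structure name and field names as documented in the videoanalyse
     * section above */
    if (gst_structure_has_name (s, "GstVideoAnalyse")) {
      gdouble avg = 0.0, var = 0.0;

      gst_structure_get_double (s, "luma-average", &avg);
      gst_structure_get_double (s, "luma-variance", &var);
      g_print ("luma-average %.3f, luma-variance %.3f\n", avg, var);
    }
  }
  return TRUE;
}

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("videotestsrc ! videoanalyse message=true ! "
      "videoconvert ! autovideosink", NULL);
  bus = gst_element_get_bus (pipeline);
  gst_bus_add_watch (bus, on_message, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (g_main_loop_new (NULL, FALSE));
  return 0;
}

The same pattern applies to the "GstSimpleVideoMarkDetect" message documented earlier; only the structure name and the field getters change (for example gst_structure_get_boolean for "have-pattern").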